repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
foremast/foremast
src/foremast/app/create_app.py
SpinnakerApp.retrieve_template
def retrieve_template(self): """Sets the instance links with pipeline_configs and then renders template files Returns: jsondata: A json objects containing templates """ links = self.retrieve_instance_links() self.log.debug('Links is \n%s', pformat(links)) self.pipeline_config['instance_links'].update(links) jsondata = get_template( template_file='infrastructure/app_data.json.j2', appinfo=self.appinfo, pipeline_config=self.pipeline_config, formats=self.generated, run_as_user=DEFAULT_RUN_AS_USER) self.log.debug('jsondata is %s', pformat(jsondata)) return jsondata
python
def retrieve_template(self):
    """Render the application template after merging instance links.

    Merges the links from ``retrieve_instance_links`` into
    ``pipeline_config['instance_links']`` and then renders the app data
    Jinja2 template.

    Returns:
        jsondata: A json object containing templates.
    """
    extra_links = self.retrieve_instance_links()
    self.log.debug('Links is \n%s', pformat(extra_links))
    self.pipeline_config['instance_links'].update(extra_links)

    rendered = get_template(
        template_file='infrastructure/app_data.json.j2',
        appinfo=self.appinfo,
        pipeline_config=self.pipeline_config,
        formats=self.generated,
        run_as_user=DEFAULT_RUN_AS_USER)
    self.log.debug('jsondata is %s', pformat(rendered))
    return rendered
[ "def", "retrieve_template", "(", "self", ")", ":", "links", "=", "self", ".", "retrieve_instance_links", "(", ")", "self", ".", "log", ".", "debug", "(", "'Links is \\n%s'", ",", "pformat", "(", "links", ")", ")", "self", ".", "pipeline_config", "[", "'instance_links'", "]", ".", "update", "(", "links", ")", "jsondata", "=", "get_template", "(", "template_file", "=", "'infrastructure/app_data.json.j2'", ",", "appinfo", "=", "self", ".", "appinfo", ",", "pipeline_config", "=", "self", ".", "pipeline_config", ",", "formats", "=", "self", ".", "generated", ",", "run_as_user", "=", "DEFAULT_RUN_AS_USER", ")", "self", ".", "log", ".", "debug", "(", "'jsondata is %s'", ",", "pformat", "(", "jsondata", ")", ")", "return", "jsondata" ]
Sets the instance links with pipeline_configs and then renders template files Returns: jsondata: A json objects containing templates
[ "Sets", "the", "instance", "links", "with", "pipeline_configs", "and", "then", "renders", "template", "files" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/create_app.py#L99-L115
train
foremast/foremast
src/foremast/app/create_app.py
SpinnakerApp.retrieve_instance_links
def retrieve_instance_links(self): """Appends on existing instance links Returns: instance_links: A dictionary containing all the instance links in LINKS and not in pipeline_config """ instance_links = {} self.log.debug("LINKS IS %s", LINKS) for key, value in LINKS.items(): if value not in self.pipeline_config['instance_links'].values(): instance_links[key] = value return instance_links
python
def retrieve_instance_links(self):
    """Collect configured instance links not already present.

    Returns:
        dict: Entries from ``LINKS`` whose values do not already appear in
        ``pipeline_config['instance_links']``.
    """
    self.log.debug("LINKS IS %s", LINKS)
    existing_values = self.pipeline_config['instance_links'].values()
    return {name: url for name, url in LINKS.items() if url not in existing_values}
[ "def", "retrieve_instance_links", "(", "self", ")", ":", "instance_links", "=", "{", "}", "self", ".", "log", ".", "debug", "(", "\"LINKS IS %s\"", ",", "LINKS", ")", "for", "key", ",", "value", "in", "LINKS", ".", "items", "(", ")", ":", "if", "value", "not", "in", "self", ".", "pipeline_config", "[", "'instance_links'", "]", ".", "values", "(", ")", ":", "instance_links", "[", "key", "]", "=", "value", "return", "instance_links" ]
Appends on existing instance links Returns: instance_links: A dictionary containing all the instance links in LINKS and not in pipeline_config
[ "Appends", "on", "existing", "instance", "links" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/create_app.py#L117-L128
train
foremast/foremast
src/foremast/utils/get_cloudwatch_event_rule.py
get_cloudwatch_event_rule
def get_cloudwatch_event_rule(app_name, account, region): """Get CloudWatch Event rule names.""" session = boto3.Session(profile_name=account, region_name=region) cloudwatch_client = session.client('events') lambda_alias_arn = get_lambda_alias_arn(app=app_name, account=account, region=region) rule_names = cloudwatch_client.list_rule_names_by_target(TargetArn=lambda_alias_arn) if rule_names['RuleNames']: all_rules = rule_names['RuleNames'] else: LOG.debug("No event rules found") all_rules = [] return all_rules
python
def get_cloudwatch_event_rule(app_name, account, region):
    """Get CloudWatch Event rule names.

    Looks up the rules targeting the application's Lambda alias ARN.

    Args:
        app_name (str): Application name.
        account (str): AWS account (boto3 profile name).
        region (str): AWS region name.

    Returns:
        list: CloudWatch Event rule names; empty when none target the alias.
    """
    session = boto3.Session(profile_name=account, region_name=region)
    events_client = session.client('events')

    alias_arn = get_lambda_alias_arn(app=app_name, account=account, region=region)
    response = events_client.list_rule_names_by_target(TargetArn=alias_arn)

    found_rules = response['RuleNames']
    if not found_rules:
        LOG.debug("No event rules found")
        found_rules = []
    return found_rules
[ "def", "get_cloudwatch_event_rule", "(", "app_name", ",", "account", ",", "region", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "account", ",", "region_name", "=", "region", ")", "cloudwatch_client", "=", "session", ".", "client", "(", "'events'", ")", "lambda_alias_arn", "=", "get_lambda_alias_arn", "(", "app", "=", "app_name", ",", "account", "=", "account", ",", "region", "=", "region", ")", "rule_names", "=", "cloudwatch_client", ".", "list_rule_names_by_target", "(", "TargetArn", "=", "lambda_alias_arn", ")", "if", "rule_names", "[", "'RuleNames'", "]", ":", "all_rules", "=", "rule_names", "[", "'RuleNames'", "]", "else", ":", "LOG", ".", "debug", "(", "\"No event rules found\"", ")", "all_rules", "=", "[", "]", "return", "all_rules" ]
Get CloudWatch Event rule names.
[ "Get", "CloudWatch", "Event", "rule", "names", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/get_cloudwatch_event_rule.py#L11-L24
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment.setup_pathing
def setup_pathing(self): """Format pathing for S3 deployments.""" self.s3_version_uri = self._path_formatter(self.version) self.s3_latest_uri = self._path_formatter("LATEST") self.s3_canary_uri = self._path_formatter("CANARY") self.s3_alpha_uri = self._path_formatter("ALPHA") self.s3_mirror_uri = self._path_formatter("MIRROR")
python
def setup_pathing(self):
    """Format pathing for S3 deployments.

    Populates one ``s3_*_uri`` attribute per deployment stage via
    ``_path_formatter``.
    """
    stage_suffixes = {
        's3_version_uri': self.version,
        's3_latest_uri': "LATEST",
        's3_canary_uri': "CANARY",
        's3_alpha_uri': "ALPHA",
        's3_mirror_uri': "MIRROR",
    }
    for attribute, suffix in stage_suffixes.items():
        setattr(self, attribute, self._path_formatter(suffix))
[ "def", "setup_pathing", "(", "self", ")", ":", "self", ".", "s3_version_uri", "=", "self", ".", "_path_formatter", "(", "self", ".", "version", ")", "self", ".", "s3_latest_uri", "=", "self", ".", "_path_formatter", "(", "\"LATEST\"", ")", "self", ".", "s3_canary_uri", "=", "self", ".", "_path_formatter", "(", "\"CANARY\"", ")", "self", ".", "s3_alpha_uri", "=", "self", ".", "_path_formatter", "(", "\"ALPHA\"", ")", "self", ".", "s3_mirror_uri", "=", "self", ".", "_path_formatter", "(", "\"MIRROR\"", ")" ]
Format pathing for S3 deployments.
[ "Format", "pathing", "for", "S3", "deployments", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L69-L75
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment._path_formatter
def _path_formatter(self, suffix): """Format the s3 path properly. Args: suffix (str): suffix to add on to an s3 path Returns: str: formatted path """ if suffix.lower() == "mirror": path_items = [self.bucket, self.s3path] else: path_items = [self.bucket, self.s3path, suffix] path = '/'.join(path_items) s3_format = "s3://{}" formatted_path = path.replace('//', '/') # removes configuration errors full_path = s3_format.format(formatted_path) return full_path
python
def _path_formatter(self, suffix):
    """Format the s3 path properly.

    Args:
        suffix (str): suffix to add on to an s3 path

    Returns:
        str: formatted path
    """
    # Mirror deployments use a flat bucket/path layout with no suffix.
    parts = [self.bucket, self.s3path]
    if suffix.lower() != "mirror":
        parts.append(suffix)

    joined = '/'.join(parts)
    cleaned = joined.replace('//', '/')  # removes configuration errors
    return "s3://{}".format(cleaned)
[ "def", "_path_formatter", "(", "self", ",", "suffix", ")", ":", "if", "suffix", ".", "lower", "(", ")", "==", "\"mirror\"", ":", "path_items", "=", "[", "self", ".", "bucket", ",", "self", ".", "s3path", "]", "else", ":", "path_items", "=", "[", "self", ".", "bucket", ",", "self", ".", "s3path", ",", "suffix", "]", "path", "=", "'/'", ".", "join", "(", "path_items", ")", "s3_format", "=", "\"s3://{}\"", "formatted_path", "=", "path", ".", "replace", "(", "'//'", ",", "'/'", ")", "# removes configuration errors", "full_path", "=", "s3_format", ".", "format", "(", "formatted_path", ")", "return", "full_path" ]
Format the s3 path properly. Args: suffix (str): suffix to add on to an s3 path Returns: str: formatted path
[ "Format", "the", "s3", "path", "properly", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L77-L96
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment.upload_artifacts
def upload_artifacts(self): """Upload artifacts to S3 and copy to correct path depending on strategy.""" deploy_strategy = self.properties["deploy_strategy"] mirror = False if deploy_strategy == "mirror": mirror = True self._upload_artifacts_to_path(mirror=mirror) if deploy_strategy == "highlander": self._sync_to_uri(self.s3_latest_uri) elif deploy_strategy == "canary": self._sync_to_uri(self.s3_canary_uri) elif deploy_strategy == "alpha": self._sync_to_uri(self.s3_alpha_uri) elif deploy_strategy == "mirror": pass # Nothing extra needed for mirror deployments else: raise NotImplementedError
python
def upload_artifacts(self):
    """Upload artifacts to S3 and copy to correct path depending on strategy."""
    strategy = self.properties["deploy_strategy"]

    # Mirror deployments upload into a flat path instead of a versioned one.
    self._upload_artifacts_to_path(mirror=(strategy == "mirror"))

    if strategy == "highlander":
        self._sync_to_uri(self.s3_latest_uri)
    elif strategy == "canary":
        self._sync_to_uri(self.s3_canary_uri)
    elif strategy == "alpha":
        self._sync_to_uri(self.s3_alpha_uri)
    elif strategy != "mirror":  # mirror needs no extra sync step
        raise NotImplementedError
[ "def", "upload_artifacts", "(", "self", ")", ":", "deploy_strategy", "=", "self", ".", "properties", "[", "\"deploy_strategy\"", "]", "mirror", "=", "False", "if", "deploy_strategy", "==", "\"mirror\"", ":", "mirror", "=", "True", "self", ".", "_upload_artifacts_to_path", "(", "mirror", "=", "mirror", ")", "if", "deploy_strategy", "==", "\"highlander\"", ":", "self", ".", "_sync_to_uri", "(", "self", ".", "s3_latest_uri", ")", "elif", "deploy_strategy", "==", "\"canary\"", ":", "self", ".", "_sync_to_uri", "(", "self", ".", "s3_canary_uri", ")", "elif", "deploy_strategy", "==", "\"alpha\"", ":", "self", ".", "_sync_to_uri", "(", "self", ".", "s3_alpha_uri", ")", "elif", "deploy_strategy", "==", "\"mirror\"", ":", "pass", "# Nothing extra needed for mirror deployments", "else", ":", "raise", "NotImplementedError" ]
Upload artifacts to S3 and copy to correct path depending on strategy.
[ "Upload", "artifacts", "to", "S3", "and", "copy", "to", "correct", "path", "depending", "on", "strategy", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L98-L116
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment.promote_artifacts
def promote_artifacts(self, promote_stage='latest'): """Promote artifact version to dest. Args: promote_stage (string): Stage that is being promoted """ if promote_stage.lower() == 'alpha': self._sync_to_uri(self.s3_canary_uri) elif promote_stage.lower() == 'canary': self._sync_to_uri(self.s3_latest_uri) else: self._sync_to_uri(self.s3_latest_uri)
python
def promote_artifacts(self, promote_stage='latest'):
    """Promote artifact version to dest.

    Args:
        promote_stage (string): Stage that is being promoted
    """
    stage = promote_stage.lower()
    if stage == 'alpha':
        destination = self.s3_canary_uri
    else:
        # 'canary' and every other stage promote to the LATEST URI.
        destination = self.s3_latest_uri
    self._sync_to_uri(destination)
[ "def", "promote_artifacts", "(", "self", ",", "promote_stage", "=", "'latest'", ")", ":", "if", "promote_stage", ".", "lower", "(", ")", "==", "'alpha'", ":", "self", ".", "_sync_to_uri", "(", "self", ".", "s3_canary_uri", ")", "elif", "promote_stage", ".", "lower", "(", ")", "==", "'canary'", ":", "self", ".", "_sync_to_uri", "(", "self", ".", "s3_latest_uri", ")", "else", ":", "self", ".", "_sync_to_uri", "(", "self", ".", "s3_latest_uri", ")" ]
Promote artifact version to dest. Args: promote_stage (string): Stage that is being promoted
[ "Promote", "artifact", "version", "to", "dest", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L118-L129
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment._get_upload_cmd
def _get_upload_cmd(self, mirror=False): """Generate the S3 CLI upload command Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. Returns: str: The full CLI command to run. """ if mirror: dest_uri = self.s3_mirror_uri else: dest_uri = self.s3_version_uri cmd = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(self.artifact_path, dest_uri, self.env) return cmd
python
def _get_upload_cmd(self, mirror=False):
    """Generate the S3 CLI upload command.

    Args:
        mirror (bool): If true, uses a flat directory structure instead of
            nesting under a version.

    Returns:
        str: The full CLI command to run.
    """
    destination = self.s3_mirror_uri if mirror else self.s3_version_uri
    return 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(
        self.artifact_path, destination, self.env)
[ "def", "_get_upload_cmd", "(", "self", ",", "mirror", "=", "False", ")", ":", "if", "mirror", ":", "dest_uri", "=", "self", ".", "s3_mirror_uri", "else", ":", "dest_uri", "=", "self", ".", "s3_version_uri", "cmd", "=", "'aws s3 sync {} {} --delete --exact-timestamps --profile {}'", ".", "format", "(", "self", ".", "artifact_path", ",", "dest_uri", ",", "self", ".", "env", ")", "return", "cmd" ]
Generate the S3 CLI upload command Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. Returns: str: The full CLI command to run.
[ "Generate", "the", "S3", "CLI", "upload", "command" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L131-L147
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment._upload_artifacts_to_path
def _upload_artifacts_to_path(self, mirror=False): """Recursively upload directory contents to S3. Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. """ if not os.listdir(self.artifact_path) or not self.artifact_path: raise S3ArtifactNotFound uploaded = False if self.s3props.get("content_metadata"): LOG.info("Uploading in multiple parts to set metadata") uploaded = self.content_metadata_uploads(mirror=mirror) if not uploaded: cmd = self._get_upload_cmd(mirror=mirror) result = subprocess.run(cmd, check=True, shell=True, stdout=subprocess.PIPE) LOG.debug("Upload Command Ouput: %s", result.stdout) LOG.info("Uploaded artifacts to %s bucket", self.bucket)
python
def _upload_artifacts_to_path(self, mirror=False):
    """Recursively upload directory contents to S3.

    Args:
        mirror (bool): If true, uses a flat directory structure instead of
            nesting under a version.

    Raises:
        S3ArtifactNotFound: If the artifact path is unset or contains no files.
    """
    # BUGFIX: validate the path BEFORE listing it. The original checked
    # `not os.listdir(self.artifact_path) or not self.artifact_path`, so an
    # unset/empty path raised TypeError/FileNotFoundError from os.listdir
    # instead of the intended S3ArtifactNotFound.
    if not self.artifact_path or not os.listdir(self.artifact_path):
        raise S3ArtifactNotFound

    uploaded = False
    if self.s3props.get("content_metadata"):
        # Metadata-bearing content must be uploaded in multiple passes so
        # each encoded directory gets its own object metadata.
        LOG.info("Uploading in multiple parts to set metadata")
        uploaded = self.content_metadata_uploads(mirror=mirror)

    if not uploaded:
        cmd = self._get_upload_cmd(mirror=mirror)
        result = subprocess.run(cmd, check=True, shell=True, stdout=subprocess.PIPE)
        LOG.debug("Upload Command Output: %s", result.stdout)  # fixed "Ouput" typo

    LOG.info("Uploaded artifacts to %s bucket", self.bucket)
[ "def", "_upload_artifacts_to_path", "(", "self", ",", "mirror", "=", "False", ")", ":", "if", "not", "os", ".", "listdir", "(", "self", ".", "artifact_path", ")", "or", "not", "self", ".", "artifact_path", ":", "raise", "S3ArtifactNotFound", "uploaded", "=", "False", "if", "self", ".", "s3props", ".", "get", "(", "\"content_metadata\"", ")", ":", "LOG", ".", "info", "(", "\"Uploading in multiple parts to set metadata\"", ")", "uploaded", "=", "self", ".", "content_metadata_uploads", "(", "mirror", "=", "mirror", ")", "if", "not", "uploaded", ":", "cmd", "=", "self", ".", "_get_upload_cmd", "(", "mirror", "=", "mirror", ")", "result", "=", "subprocess", ".", "run", "(", "cmd", ",", "check", "=", "True", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "LOG", ".", "debug", "(", "\"Upload Command Ouput: %s\"", ",", "result", ".", "stdout", ")", "LOG", ".", "info", "(", "\"Uploaded artifacts to %s bucket\"", ",", "self", ".", "bucket", ")" ]
Recursively upload directory contents to S3. Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version.
[ "Recursively", "upload", "directory", "contents", "to", "S3", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L149-L168
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment.content_metadata_uploads
def content_metadata_uploads(self, mirror=False): """Finds all specified encoded directories and uploads in multiple parts, setting metadata for objects. Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. Returns: bool: True if uploaded """ excludes_str = '' includes_cmds = [] cmd_base = self._get_upload_cmd(mirror=mirror) for content in self.s3props.get('content_metadata'): full_path = os.path.join(self.artifact_path, content['path']) if not os.listdir(full_path): raise S3ArtifactNotFound excludes_str += '--exclude "{}/*" '.format(content['path']) include_cmd = '{} --exclude "*", --include "{}/*"'.format(cmd_base, content['path']) include_cmd += ' --content-encoding {} --metadata-directive REPLACE'.format(content['content-encoding']) includes_cmds.append(include_cmd) exclude_cmd = '{} {}'.format(cmd_base, excludes_str) result = subprocess.run(exclude_cmd, check=True, shell=True, stdout=subprocess.PIPE) LOG.info("Uploaded files without metadata with command: %s", exclude_cmd) LOG.debug("Upload Command Output: %s", result.stdout) for include_cmd in includes_cmds: result = subprocess.run(include_cmd, check=True, shell=True, stdout=subprocess.PIPE) LOG.info("Uploaded files with metadata with command: %s", include_cmd) LOG.debug("Upload Command Output: %s", result.stdout) return True
python
def content_metadata_uploads(self, mirror=False):
    """Finds all specified encoded directories and uploads in multiple parts,
    setting metadata for objects.

    Args:
        mirror (bool): If true, uses a flat directory structure instead of
            nesting under a version.

    Returns:
        bool: True if uploaded

    Raises:
        S3ArtifactNotFound: If a configured content path contains no files.
    """
    excludes_str = ''
    includes_cmds = []
    cmd_base = self._get_upload_cmd(mirror=mirror)

    for content in self.s3props.get('content_metadata'):
        full_path = os.path.join(self.artifact_path, content['path'])
        if not os.listdir(full_path):
            raise S3ArtifactNotFound

        excludes_str += '--exclude "{}/*" '.format(content['path'])

        # BUGFIX: removed stray comma after --exclude "*". The comma sat
        # outside the quotes, so the CLI received the pattern `*,` instead
        # of `*` and the include-only pass did not exclude everything else.
        include_cmd = '{} --exclude "*" --include "{}/*"'.format(cmd_base, content['path'])
        include_cmd += ' --content-encoding {} --metadata-directive REPLACE'.format(content['content-encoding'])
        includes_cmds.append(include_cmd)

    # First pass: upload everything except the metadata-bearing directories.
    exclude_cmd = '{} {}'.format(cmd_base, excludes_str)
    result = subprocess.run(exclude_cmd, check=True, shell=True, stdout=subprocess.PIPE)
    LOG.info("Uploaded files without metadata with command: %s", exclude_cmd)
    LOG.debug("Upload Command Output: %s", result.stdout)

    # Second pass: upload each metadata-bearing directory with its encoding.
    for include_cmd in includes_cmds:
        result = subprocess.run(include_cmd, check=True, shell=True, stdout=subprocess.PIPE)
        LOG.info("Uploaded files with metadata with command: %s", include_cmd)
        LOG.debug("Upload Command Output: %s", result.stdout)

    return True
[ "def", "content_metadata_uploads", "(", "self", ",", "mirror", "=", "False", ")", ":", "excludes_str", "=", "''", "includes_cmds", "=", "[", "]", "cmd_base", "=", "self", ".", "_get_upload_cmd", "(", "mirror", "=", "mirror", ")", "for", "content", "in", "self", ".", "s3props", ".", "get", "(", "'content_metadata'", ")", ":", "full_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "artifact_path", ",", "content", "[", "'path'", "]", ")", "if", "not", "os", ".", "listdir", "(", "full_path", ")", ":", "raise", "S3ArtifactNotFound", "excludes_str", "+=", "'--exclude \"{}/*\" '", ".", "format", "(", "content", "[", "'path'", "]", ")", "include_cmd", "=", "'{} --exclude \"*\", --include \"{}/*\"'", ".", "format", "(", "cmd_base", ",", "content", "[", "'path'", "]", ")", "include_cmd", "+=", "' --content-encoding {} --metadata-directive REPLACE'", ".", "format", "(", "content", "[", "'content-encoding'", "]", ")", "includes_cmds", ".", "append", "(", "include_cmd", ")", "exclude_cmd", "=", "'{} {}'", ".", "format", "(", "cmd_base", ",", "excludes_str", ")", "result", "=", "subprocess", ".", "run", "(", "exclude_cmd", ",", "check", "=", "True", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "LOG", ".", "info", "(", "\"Uploaded files without metadata with command: %s\"", ",", "exclude_cmd", ")", "LOG", ".", "debug", "(", "\"Upload Command Output: %s\"", ",", "result", ".", "stdout", ")", "for", "include_cmd", "in", "includes_cmds", ":", "result", "=", "subprocess", ".", "run", "(", "include_cmd", ",", "check", "=", "True", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "LOG", ".", "info", "(", "\"Uploaded files with metadata with command: %s\"", ",", "include_cmd", ")", "LOG", ".", "debug", "(", "\"Upload Command Output: %s\"", ",", "result", ".", "stdout", ")", "return", "True" ]
Finds all specified encoded directories and uploads in multiple parts, setting metadata for objects. Args: mirror (bool): If true, uses a flat directory structure instead of nesting under a version. Returns: bool: True if uploaded
[ "Finds", "all", "specified", "encoded", "directories", "and", "uploads", "in", "multiple", "parts", "setting", "metadata", "for", "objects", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L170-L204
train
foremast/foremast
src/foremast/s3/s3deploy.py
S3Deployment._sync_to_uri
def _sync_to_uri(self, uri): """Copy and sync versioned directory to uri in S3. Args: uri (str): S3 URI to sync version to. """ cmd_cp = 'aws s3 cp {} {} --recursive --profile {}'.format(self.s3_version_uri, uri, self.env) # AWS CLI sync does not work as expected bucket to bucket with exact timestamp sync. cmd_sync = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format( self.s3_version_uri, uri, self.env) cp_result = subprocess.run(cmd_cp, check=True, shell=True, stdout=subprocess.PIPE) LOG.debug("Copy to %s before sync output: %s", uri, cp_result.stdout) LOG.info("Copied version %s to %s", self.version, uri) sync_result = subprocess.run(cmd_sync, check=True, shell=True, stdout=subprocess.PIPE) LOG.debug("Sync to %s command output: %s", uri, sync_result.stdout) LOG.info("Synced version %s to %s", self.version, uri)
python
def _sync_to_uri(self, uri):
    """Copy and sync versioned directory to uri in S3.

    Args:
        uri (str): S3 URI to sync version to.
    """
    source = self.s3_version_uri
    profile = self.env

    copy_command = 'aws s3 cp {} {} --recursive --profile {}'.format(source, uri, profile)
    # AWS CLI sync does not work as expected bucket to bucket with exact timestamp sync.
    sync_command = 'aws s3 sync {} {} --delete --exact-timestamps --profile {}'.format(source, uri, profile)

    cp_result = subprocess.run(copy_command, check=True, shell=True, stdout=subprocess.PIPE)
    LOG.debug("Copy to %s before sync output: %s", uri, cp_result.stdout)
    LOG.info("Copied version %s to %s", self.version, uri)

    sync_result = subprocess.run(sync_command, check=True, shell=True, stdout=subprocess.PIPE)
    LOG.debug("Sync to %s command output: %s", uri, sync_result.stdout)
    LOG.info("Synced version %s to %s", self.version, uri)
[ "def", "_sync_to_uri", "(", "self", ",", "uri", ")", ":", "cmd_cp", "=", "'aws s3 cp {} {} --recursive --profile {}'", ".", "format", "(", "self", ".", "s3_version_uri", ",", "uri", ",", "self", ".", "env", ")", "# AWS CLI sync does not work as expected bucket to bucket with exact timestamp sync.", "cmd_sync", "=", "'aws s3 sync {} {} --delete --exact-timestamps --profile {}'", ".", "format", "(", "self", ".", "s3_version_uri", ",", "uri", ",", "self", ".", "env", ")", "cp_result", "=", "subprocess", ".", "run", "(", "cmd_cp", ",", "check", "=", "True", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "LOG", ".", "debug", "(", "\"Copy to %s before sync output: %s\"", ",", "uri", ",", "cp_result", ".", "stdout", ")", "LOG", ".", "info", "(", "\"Copied version %s to %s\"", ",", "self", ".", "version", ",", "uri", ")", "sync_result", "=", "subprocess", ".", "run", "(", "cmd_sync", ",", "check", "=", "True", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "LOG", ".", "debug", "(", "\"Sync to %s command output: %s\"", ",", "uri", ",", "sync_result", ".", "stdout", ")", "LOG", ".", "info", "(", "\"Synced version %s to %s\"", ",", "self", ".", "version", ",", "uri", ")" ]
Copy and sync versioned directory to uri in S3. Args: uri (str): S3 URI to sync version to.
[ "Copy", "and", "sync", "versioned", "directory", "to", "uri", "in", "S3", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/s3deploy.py#L206-L223
train
foremast/foremast
src/foremast/utils/vpc.py
get_vpc_id
def get_vpc_id(account, region): """Get VPC ID configured for ``account`` in ``region``. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1. Returns: str: VPC ID for the requested ``account`` in ``region``. Raises: :obj:`foremast.exceptions.SpinnakerVPCIDNotFound`: VPC ID not found for ``account`` in ``region``. :obj:`foremast.exceptions.SpinnakerVPCNotFound`: Spinnaker has no VPCs configured. """ url = '{0}/networks/aws'.format(API_URL) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not response.ok: raise SpinnakerVPCNotFound(response.text) vpcs = response.json() for vpc in vpcs: LOG.debug('VPC: %(name)s, %(account)s, %(region)s => %(id)s', vpc) if 'name' in vpc and all([vpc['name'] == 'vpc', vpc['account'] == account, vpc['region'] == region]): LOG.info('Found VPC ID for %s in %s: %s', account, region, vpc['id']) vpc_id = vpc['id'] break else: LOG.fatal('VPC list: %s', vpcs) raise SpinnakerVPCIDNotFound('No VPC available for {0} [{1}].'.format(account, region)) return vpc_id
python
def get_vpc_id(account, region):
    """Get VPC ID configured for ``account`` in ``region``.

    Args:
        account (str): AWS account name.
        region (str): Region name, e.g. us-east-1.

    Returns:
        str: VPC ID for the requested ``account`` in ``region``.

    Raises:
        :obj:`foremast.exceptions.SpinnakerVPCIDNotFound`: VPC ID not found for
            ``account`` in ``region``.
        :obj:`foremast.exceptions.SpinnakerVPCNotFound`: Spinnaker has no VPCs
            configured.
    """
    response = requests.get('{0}/networks/aws'.format(API_URL),
                            verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT)
    if not response.ok:
        raise SpinnakerVPCNotFound(response.text)

    vpcs = response.json()
    for vpc in vpcs:
        LOG.debug('VPC: %(name)s, %(account)s, %(region)s => %(id)s', vpc)
        # NOTE: all([...]) deliberately evaluates every element (matching the
        # original eager semantics) rather than short-circuiting.
        if 'name' in vpc and all([vpc['name'] == 'vpc',
                                  vpc['account'] == account,
                                  vpc['region'] == region]):
            LOG.info('Found VPC ID for %s in %s: %s', account, region, vpc['id'])
            return vpc['id']

    LOG.fatal('VPC list: %s', vpcs)
    raise SpinnakerVPCIDNotFound('No VPC available for {0} [{1}].'.format(account, region))
[ "def", "get_vpc_id", "(", "account", ",", "region", ")", ":", "url", "=", "'{0}/networks/aws'", ".", "format", "(", "API_URL", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "if", "not", "response", ".", "ok", ":", "raise", "SpinnakerVPCNotFound", "(", "response", ".", "text", ")", "vpcs", "=", "response", ".", "json", "(", ")", "for", "vpc", "in", "vpcs", ":", "LOG", ".", "debug", "(", "'VPC: %(name)s, %(account)s, %(region)s => %(id)s'", ",", "vpc", ")", "if", "'name'", "in", "vpc", "and", "all", "(", "[", "vpc", "[", "'name'", "]", "==", "'vpc'", ",", "vpc", "[", "'account'", "]", "==", "account", ",", "vpc", "[", "'region'", "]", "==", "region", "]", ")", ":", "LOG", ".", "info", "(", "'Found VPC ID for %s in %s: %s'", ",", "account", ",", "region", ",", "vpc", "[", "'id'", "]", ")", "vpc_id", "=", "vpc", "[", "'id'", "]", "break", "else", ":", "LOG", ".", "fatal", "(", "'VPC list: %s'", ",", "vpcs", ")", "raise", "SpinnakerVPCIDNotFound", "(", "'No VPC available for {0} [{1}].'", ".", "format", "(", "account", ",", "region", ")", ")", "return", "vpc_id" ]
Get VPC ID configured for ``account`` in ``region``. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1. Returns: str: VPC ID for the requested ``account`` in ``region``. Raises: :obj:`foremast.exceptions.SpinnakerVPCIDNotFound`: VPC ID not found for ``account`` in ``region``. :obj:`foremast.exceptions.SpinnakerVPCNotFound`: Spinnaker has no VPCs configured.
[ "Get", "VPC", "ID", "configured", "for", "account", "in", "region", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/vpc.py#L27-L62
train
foremast/foremast
src/foremast/utils/subnets.py
get_subnets
def get_subnets( target='ec2', purpose='internal', env='', region='', ): """Get all availability zones for a given target. Args: target (str): Type of subnets to look up (ec2 or elb). env (str): Environment to look up. region (str): AWS Region to find Subnets for. Returns: az_dict: dictionary of availbility zones, structured like { $region: [ $avaibilityzones ] } or { $account: $region: [ $availabilityzone] } """ account_az_dict = defaultdict(defaultdict) subnet_id_dict = defaultdict(defaultdict) subnet_url = '{0}/subnets/aws'.format(API_URL) subnet_response = requests.get(subnet_url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not subnet_response.ok: raise SpinnakerTimeout(subnet_response.text) subnet_list = subnet_response.json() for subnet in subnet_list: LOG.debug('Subnet: %(account)s\t%(region)s\t%(target)s\t%(vpcId)s\t' '%(availabilityZone)s', subnet) if subnet.get('target', '') == target: availability_zone = subnet['availabilityZone'] account = subnet['account'] subnet_region = subnet['region'] subnet_id = subnet['id'] try: if availability_zone not in account_az_dict[account][subnet_region]: account_az_dict[account][subnet_region].append(availability_zone) except KeyError: account_az_dict[account][subnet_region] = [availability_zone] # get list of all subnet IDs with correct purpose if subnet['purpose'] == purpose: try: subnet_id_dict[account][subnet_region].append(subnet_id) except KeyError: subnet_id_dict[account][subnet_region] = [subnet_id] LOG.debug('%s regions: %s', account, list(account_az_dict[account].keys())) if all([env, region]): try: region_dict = {region: account_az_dict[env][region]} region_dict['subnet_ids'] = {region: subnet_id_dict[env][region]} LOG.debug('Region dict: %s', region_dict) return region_dict except KeyError: raise SpinnakerSubnetError(env=env, region=region) LOG.debug('AZ dict:\n%s', pformat(dict(account_az_dict))) return account_az_dict
python
def get_subnets( target='ec2', purpose='internal', env='', region='', ): """Get all availability zones for a given target. Args: target (str): Type of subnets to look up (ec2 or elb). env (str): Environment to look up. region (str): AWS Region to find Subnets for. Returns: az_dict: dictionary of availbility zones, structured like { $region: [ $avaibilityzones ] } or { $account: $region: [ $availabilityzone] } """ account_az_dict = defaultdict(defaultdict) subnet_id_dict = defaultdict(defaultdict) subnet_url = '{0}/subnets/aws'.format(API_URL) subnet_response = requests.get(subnet_url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not subnet_response.ok: raise SpinnakerTimeout(subnet_response.text) subnet_list = subnet_response.json() for subnet in subnet_list: LOG.debug('Subnet: %(account)s\t%(region)s\t%(target)s\t%(vpcId)s\t' '%(availabilityZone)s', subnet) if subnet.get('target', '') == target: availability_zone = subnet['availabilityZone'] account = subnet['account'] subnet_region = subnet['region'] subnet_id = subnet['id'] try: if availability_zone not in account_az_dict[account][subnet_region]: account_az_dict[account][subnet_region].append(availability_zone) except KeyError: account_az_dict[account][subnet_region] = [availability_zone] # get list of all subnet IDs with correct purpose if subnet['purpose'] == purpose: try: subnet_id_dict[account][subnet_region].append(subnet_id) except KeyError: subnet_id_dict[account][subnet_region] = [subnet_id] LOG.debug('%s regions: %s', account, list(account_az_dict[account].keys())) if all([env, region]): try: region_dict = {region: account_az_dict[env][region]} region_dict['subnet_ids'] = {region: subnet_id_dict[env][region]} LOG.debug('Region dict: %s', region_dict) return region_dict except KeyError: raise SpinnakerSubnetError(env=env, region=region) LOG.debug('AZ dict:\n%s', pformat(dict(account_az_dict))) return account_az_dict
[ "def", "get_subnets", "(", "target", "=", "'ec2'", ",", "purpose", "=", "'internal'", ",", "env", "=", "''", ",", "region", "=", "''", ",", ")", ":", "account_az_dict", "=", "defaultdict", "(", "defaultdict", ")", "subnet_id_dict", "=", "defaultdict", "(", "defaultdict", ")", "subnet_url", "=", "'{0}/subnets/aws'", ".", "format", "(", "API_URL", ")", "subnet_response", "=", "requests", ".", "get", "(", "subnet_url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "if", "not", "subnet_response", ".", "ok", ":", "raise", "SpinnakerTimeout", "(", "subnet_response", ".", "text", ")", "subnet_list", "=", "subnet_response", ".", "json", "(", ")", "for", "subnet", "in", "subnet_list", ":", "LOG", ".", "debug", "(", "'Subnet: %(account)s\\t%(region)s\\t%(target)s\\t%(vpcId)s\\t'", "'%(availabilityZone)s'", ",", "subnet", ")", "if", "subnet", ".", "get", "(", "'target'", ",", "''", ")", "==", "target", ":", "availability_zone", "=", "subnet", "[", "'availabilityZone'", "]", "account", "=", "subnet", "[", "'account'", "]", "subnet_region", "=", "subnet", "[", "'region'", "]", "subnet_id", "=", "subnet", "[", "'id'", "]", "try", ":", "if", "availability_zone", "not", "in", "account_az_dict", "[", "account", "]", "[", "subnet_region", "]", ":", "account_az_dict", "[", "account", "]", "[", "subnet_region", "]", ".", "append", "(", "availability_zone", ")", "except", "KeyError", ":", "account_az_dict", "[", "account", "]", "[", "subnet_region", "]", "=", "[", "availability_zone", "]", "# get list of all subnet IDs with correct purpose", "if", "subnet", "[", "'purpose'", "]", "==", "purpose", ":", "try", ":", "subnet_id_dict", "[", "account", "]", "[", "subnet_region", "]", ".", "append", "(", "subnet_id", ")", "except", "KeyError", ":", "subnet_id_dict", "[", "account", "]", "[", "subnet_region", "]", "=", "[", "subnet_id", "]", "LOG", ".", "debug", "(", "'%s regions: %s'", ",", "account", ",", "list", "(", "account_az_dict", "[", "account", 
"]", ".", "keys", "(", ")", ")", ")", "if", "all", "(", "[", "env", ",", "region", "]", ")", ":", "try", ":", "region_dict", "=", "{", "region", ":", "account_az_dict", "[", "env", "]", "[", "region", "]", "}", "region_dict", "[", "'subnet_ids'", "]", "=", "{", "region", ":", "subnet_id_dict", "[", "env", "]", "[", "region", "]", "}", "LOG", ".", "debug", "(", "'Region dict: %s'", ",", "region_dict", ")", "return", "region_dict", "except", "KeyError", ":", "raise", "SpinnakerSubnetError", "(", "env", "=", "env", ",", "region", "=", "region", ")", "LOG", ".", "debug", "(", "'AZ dict:\\n%s'", ",", "pformat", "(", "dict", "(", "account_az_dict", ")", ")", ")", "return", "account_az_dict" ]
Get all availability zones for a given target. Args: target (str): Type of subnets to look up (ec2 or elb). env (str): Environment to look up. region (str): AWS Region to find Subnets for. Returns: az_dict: dictionary of availbility zones, structured like { $region: [ $avaibilityzones ] } or { $account: $region: [ $availabilityzone] }
[ "Get", "all", "availability", "zones", "for", "a", "given", "target", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/subnets.py#L32-L93
train
foremast/foremast
src/foremast/awslambda/awslambdaevent.py
LambdaEvent.create_lambda_events
def create_lambda_events(self): """Create all defined lambda events for an lambda application.""" # Clean up lambda permissions before creating triggers remove_all_lambda_permissions(app_name=self.app_name, env=self.env, region=self.region) triggers = self.properties['lambda_triggers'] for trigger in triggers: if trigger['type'] == 'sns': create_sns_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger) if trigger['type'] == 'cloudwatch-event': create_cloudwatch_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger) if trigger['type'] == 'cloudwatch-logs': create_cloudwatch_log_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger) if trigger['type'] == 'api-gateway': apigateway = APIGateway( app=self.app_name, env=self.env, region=self.region, rules=trigger, prop_path=self.prop_path) apigateway.setup_lambda_api() # filter all triggers to isolate s3 triggers so we can operate on the entire group s3_triggers = [x for x in triggers if x['type'] == 's3'] # group triggers by unique target bucket bucket_triggers = dict() for s3_trigger in s3_triggers: bucket = s3_trigger.get('bucket') if bucket in bucket_triggers: bucket_triggers[bucket].append(s3_trigger) else: bucket_triggers[bucket] = [s3_trigger] # apply relevant triggers to each respective bucket all at once. for bucket, triggers in bucket_triggers.items(): create_s3_event(app_name=self.app_name, env=self.env, region=self.region, bucket=bucket, triggers=triggers)
python
def create_lambda_events(self): """Create all defined lambda events for an lambda application.""" # Clean up lambda permissions before creating triggers remove_all_lambda_permissions(app_name=self.app_name, env=self.env, region=self.region) triggers = self.properties['lambda_triggers'] for trigger in triggers: if trigger['type'] == 'sns': create_sns_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger) if trigger['type'] == 'cloudwatch-event': create_cloudwatch_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger) if trigger['type'] == 'cloudwatch-logs': create_cloudwatch_log_event(app_name=self.app_name, env=self.env, region=self.region, rules=trigger) if trigger['type'] == 'api-gateway': apigateway = APIGateway( app=self.app_name, env=self.env, region=self.region, rules=trigger, prop_path=self.prop_path) apigateway.setup_lambda_api() # filter all triggers to isolate s3 triggers so we can operate on the entire group s3_triggers = [x for x in triggers if x['type'] == 's3'] # group triggers by unique target bucket bucket_triggers = dict() for s3_trigger in s3_triggers: bucket = s3_trigger.get('bucket') if bucket in bucket_triggers: bucket_triggers[bucket].append(s3_trigger) else: bucket_triggers[bucket] = [s3_trigger] # apply relevant triggers to each respective bucket all at once. for bucket, triggers in bucket_triggers.items(): create_s3_event(app_name=self.app_name, env=self.env, region=self.region, bucket=bucket, triggers=triggers)
[ "def", "create_lambda_events", "(", "self", ")", ":", "# Clean up lambda permissions before creating triggers", "remove_all_lambda_permissions", "(", "app_name", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ")", "triggers", "=", "self", ".", "properties", "[", "'lambda_triggers'", "]", "for", "trigger", "in", "triggers", ":", "if", "trigger", "[", "'type'", "]", "==", "'sns'", ":", "create_sns_event", "(", "app_name", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "rules", "=", "trigger", ")", "if", "trigger", "[", "'type'", "]", "==", "'cloudwatch-event'", ":", "create_cloudwatch_event", "(", "app_name", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "rules", "=", "trigger", ")", "if", "trigger", "[", "'type'", "]", "==", "'cloudwatch-logs'", ":", "create_cloudwatch_log_event", "(", "app_name", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "rules", "=", "trigger", ")", "if", "trigger", "[", "'type'", "]", "==", "'api-gateway'", ":", "apigateway", "=", "APIGateway", "(", "app", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "rules", "=", "trigger", ",", "prop_path", "=", "self", ".", "prop_path", ")", "apigateway", ".", "setup_lambda_api", "(", ")", "# filter all triggers to isolate s3 triggers so we can operate on the entire group", "s3_triggers", "=", "[", "x", "for", "x", "in", "triggers", "if", "x", "[", "'type'", "]", "==", "'s3'", "]", "# group triggers by unique target bucket", "bucket_triggers", "=", "dict", "(", ")", "for", "s3_trigger", "in", "s3_triggers", ":", "bucket", "=", "s3_trigger", ".", "get", "(", "'bucket'", ")", "if", "bucket", "in", "bucket_triggers", ":", "bucket_triggers", "[", "bucket", "]", ".", "append", 
"(", "s3_trigger", ")", "else", ":", "bucket_triggers", "[", "bucket", "]", "=", "[", "s3_trigger", "]", "# apply relevant triggers to each respective bucket all at once.", "for", "bucket", ",", "triggers", "in", "bucket_triggers", ".", "items", "(", ")", ":", "create_s3_event", "(", "app_name", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "bucket", "=", "bucket", ",", "triggers", "=", "triggers", ")" ]
Create all defined lambda events for an lambda application.
[ "Create", "all", "defined", "lambda", "events", "for", "an", "lambda", "application", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambdaevent.py#L45-L83
train
foremast/foremast
src/foremast/utils/pipelines.py
get_all_pipelines
def get_all_pipelines(app=''): """Get a list of all the Pipelines in _app_. Args: app (str): Name of Spinnaker Application. Returns: requests.models.Response: Response from Gate containing Pipelines. """ url = '{host}/applications/{app}/pipelineConfigs'.format(host=API_URL, app=app) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok, 'Could not retrieve Pipelines for {0}.'.format(app) pipelines = response.json() LOG.debug('Pipelines:\n%s', pipelines) return pipelines
python
def get_all_pipelines(app=''): """Get a list of all the Pipelines in _app_. Args: app (str): Name of Spinnaker Application. Returns: requests.models.Response: Response from Gate containing Pipelines. """ url = '{host}/applications/{app}/pipelineConfigs'.format(host=API_URL, app=app) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok, 'Could not retrieve Pipelines for {0}.'.format(app) pipelines = response.json() LOG.debug('Pipelines:\n%s', pipelines) return pipelines
[ "def", "get_all_pipelines", "(", "app", "=", "''", ")", ":", "url", "=", "'{host}/applications/{app}/pipelineConfigs'", ".", "format", "(", "host", "=", "API_URL", ",", "app", "=", "app", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "assert", "response", ".", "ok", ",", "'Could not retrieve Pipelines for {0}.'", ".", "format", "(", "app", ")", "pipelines", "=", "response", ".", "json", "(", ")", "LOG", ".", "debug", "(", "'Pipelines:\\n%s'", ",", "pipelines", ")", "return", "pipelines" ]
Get a list of all the Pipelines in _app_. Args: app (str): Name of Spinnaker Application. Returns: requests.models.Response: Response from Gate containing Pipelines.
[ "Get", "a", "list", "of", "all", "the", "Pipelines", "in", "_app_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/pipelines.py#L64-L82
train
foremast/foremast
src/foremast/utils/pipelines.py
get_pipeline_id
def get_pipeline_id(app='', name=''): """Get the ID for Pipeline _name_. Args: app (str): Name of Spinnaker Application to search. name (str): Name of Pipeline to get ID for. Returns: str: ID of specified Pipeline. None: Pipeline or Spinnaker Appliation not found. """ return_id = None pipelines = get_all_pipelines(app=app) for pipeline in pipelines: LOG.debug('ID of %(name)s: %(id)s', pipeline) if pipeline['name'] == name: return_id = pipeline['id'] LOG.info('Pipeline %s found, ID: %s', name, return_id) break return return_id
python
def get_pipeline_id(app='', name=''): """Get the ID for Pipeline _name_. Args: app (str): Name of Spinnaker Application to search. name (str): Name of Pipeline to get ID for. Returns: str: ID of specified Pipeline. None: Pipeline or Spinnaker Appliation not found. """ return_id = None pipelines = get_all_pipelines(app=app) for pipeline in pipelines: LOG.debug('ID of %(name)s: %(id)s', pipeline) if pipeline['name'] == name: return_id = pipeline['id'] LOG.info('Pipeline %s found, ID: %s', name, return_id) break return return_id
[ "def", "get_pipeline_id", "(", "app", "=", "''", ",", "name", "=", "''", ")", ":", "return_id", "=", "None", "pipelines", "=", "get_all_pipelines", "(", "app", "=", "app", ")", "for", "pipeline", "in", "pipelines", ":", "LOG", ".", "debug", "(", "'ID of %(name)s: %(id)s'", ",", "pipeline", ")", "if", "pipeline", "[", "'name'", "]", "==", "name", ":", "return_id", "=", "pipeline", "[", "'id'", "]", "LOG", ".", "info", "(", "'Pipeline %s found, ID: %s'", ",", "name", ",", "return_id", ")", "break", "return", "return_id" ]
Get the ID for Pipeline _name_. Args: app (str): Name of Spinnaker Application to search. name (str): Name of Pipeline to get ID for. Returns: str: ID of specified Pipeline. None: Pipeline or Spinnaker Appliation not found.
[ "Get", "the", "ID", "for", "Pipeline", "_name_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/pipelines.py#L85-L109
train
foremast/foremast
src/foremast/utils/pipelines.py
normalize_pipeline_name
def normalize_pipeline_name(name=''): """Translate unsafe characters to underscores.""" normalized_name = name for bad in '\\/?%#': normalized_name = normalized_name.replace(bad, '_') return normalized_name
python
def normalize_pipeline_name(name=''): """Translate unsafe characters to underscores.""" normalized_name = name for bad in '\\/?%#': normalized_name = normalized_name.replace(bad, '_') return normalized_name
[ "def", "normalize_pipeline_name", "(", "name", "=", "''", ")", ":", "normalized_name", "=", "name", "for", "bad", "in", "'\\\\/?%#'", ":", "normalized_name", "=", "normalized_name", ".", "replace", "(", "bad", ",", "'_'", ")", "return", "normalized_name" ]
Translate unsafe characters to underscores.
[ "Translate", "unsafe", "characters", "to", "underscores", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/pipelines.py#L112-L117
train
foremast/foremast
src/foremast/utils/apps.py
get_all_apps
def get_all_apps(): """Get a list of all applications in Spinnaker. Returns: requests.models.Response: Response from Gate containing list of all apps. """ LOG.info('Retreiving list of all Spinnaker applications') url = '{}/applications'.format(API_URL) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok, 'Could not retrieve application list' pipelines = response.json() LOG.debug('All Applications:\n%s', pipelines) return pipelines
python
def get_all_apps(): """Get a list of all applications in Spinnaker. Returns: requests.models.Response: Response from Gate containing list of all apps. """ LOG.info('Retreiving list of all Spinnaker applications') url = '{}/applications'.format(API_URL) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert response.ok, 'Could not retrieve application list' pipelines = response.json() LOG.debug('All Applications:\n%s', pipelines) return pipelines
[ "def", "get_all_apps", "(", ")", ":", "LOG", ".", "info", "(", "'Retreiving list of all Spinnaker applications'", ")", "url", "=", "'{}/applications'", ".", "format", "(", "API_URL", ")", "response", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "assert", "response", ".", "ok", ",", "'Could not retrieve application list'", "pipelines", "=", "response", ".", "json", "(", ")", "LOG", ".", "debug", "(", "'All Applications:\\n%s'", ",", "pipelines", ")", "return", "pipelines" ]
Get a list of all applications in Spinnaker. Returns: requests.models.Response: Response from Gate containing list of all apps.
[ "Get", "a", "list", "of", "all", "applications", "in", "Spinnaker", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/apps.py#L28-L44
train
foremast/foremast
src/foremast/utils/apps.py
get_details
def get_details(app='groupproject', env='dev', region='us-east-1'): """Extract details for Application. Args: app (str): Application Name env (str): Environment/account to get details from Returns: collections.namedtuple with _group_, _policy_, _profile_, _role_, _user_. """ url = '{host}/applications/{app}'.format(host=API_URL, app=app) request = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not request.ok: raise SpinnakerAppNotFound('"{0}" not found.'.format(app)) app_details = request.json() LOG.debug('App details: %s', app_details) group = app_details['attributes'].get('repoProjectKey') project = app_details['attributes'].get('repoSlug') generated = gogoutils.Generator(group, project, env=env, region=region, formats=APP_FORMATS) LOG.debug('Application details: %s', generated) return generated
python
def get_details(app='groupproject', env='dev', region='us-east-1'): """Extract details for Application. Args: app (str): Application Name env (str): Environment/account to get details from Returns: collections.namedtuple with _group_, _policy_, _profile_, _role_, _user_. """ url = '{host}/applications/{app}'.format(host=API_URL, app=app) request = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not request.ok: raise SpinnakerAppNotFound('"{0}" not found.'.format(app)) app_details = request.json() LOG.debug('App details: %s', app_details) group = app_details['attributes'].get('repoProjectKey') project = app_details['attributes'].get('repoSlug') generated = gogoutils.Generator(group, project, env=env, region=region, formats=APP_FORMATS) LOG.debug('Application details: %s', generated) return generated
[ "def", "get_details", "(", "app", "=", "'groupproject'", ",", "env", "=", "'dev'", ",", "region", "=", "'us-east-1'", ")", ":", "url", "=", "'{host}/applications/{app}'", ".", "format", "(", "host", "=", "API_URL", ",", "app", "=", "app", ")", "request", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "if", "not", "request", ".", "ok", ":", "raise", "SpinnakerAppNotFound", "(", "'\"{0}\" not found.'", ".", "format", "(", "app", ")", ")", "app_details", "=", "request", ".", "json", "(", ")", "LOG", ".", "debug", "(", "'App details: %s'", ",", "app_details", ")", "group", "=", "app_details", "[", "'attributes'", "]", ".", "get", "(", "'repoProjectKey'", ")", "project", "=", "app_details", "[", "'attributes'", "]", ".", "get", "(", "'repoSlug'", ")", "generated", "=", "gogoutils", ".", "Generator", "(", "group", ",", "project", ",", "env", "=", "env", ",", "region", "=", "region", ",", "formats", "=", "APP_FORMATS", ")", "LOG", ".", "debug", "(", "'Application details: %s'", ",", "generated", ")", "return", "generated" ]
Extract details for Application. Args: app (str): Application Name env (str): Environment/account to get details from Returns: collections.namedtuple with _group_, _policy_, _profile_, _role_, _user_.
[ "Extract", "details", "for", "Application", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/apps.py#L47-L74
train
foremast/foremast
src/foremast/pipeline/create_pipeline_s3.py
SpinnakerPipelineS3.create_pipeline
def create_pipeline(self): """Main wrapper for pipeline creation. 1. Runs clean_pipelines to clean up existing ones 2. determines which environments the pipeline needs 3. Renders all of the pipeline blocks as defined in configs 4. Runs post_pipeline to create pipeline """ clean_pipelines(app=self.app_name, settings=self.settings) pipeline_envs = self.environments self.log.debug('Envs from pipeline.json: %s', pipeline_envs) regions_envs = collections.defaultdict(list) for env in pipeline_envs: for region in self.settings[env]['regions']: regions_envs[region].append(env) self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4)) pipelines = {} for region, envs in regions_envs.items(): # TODO: Overrides for an environment no longer makes sense. Need to # provide override for entire Region possibly. pipelines[region] = self.render_wrapper(region=region) previous_env = None for env in envs: block = construct_pipeline_block_s3( env=env, generated=self.generated, previous_env=previous_env, region=region, settings=self.settings[env][region], pipeline_data=self.settings['pipeline']) pipelines[region]['stages'].extend(json.loads(block)) previous_env = env self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines)) for region, pipeline in pipelines.items(): renumerate_stages(pipeline) self.post_pipeline(pipeline) return True
python
def create_pipeline(self): """Main wrapper for pipeline creation. 1. Runs clean_pipelines to clean up existing ones 2. determines which environments the pipeline needs 3. Renders all of the pipeline blocks as defined in configs 4. Runs post_pipeline to create pipeline """ clean_pipelines(app=self.app_name, settings=self.settings) pipeline_envs = self.environments self.log.debug('Envs from pipeline.json: %s', pipeline_envs) regions_envs = collections.defaultdict(list) for env in pipeline_envs: for region in self.settings[env]['regions']: regions_envs[region].append(env) self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4)) pipelines = {} for region, envs in regions_envs.items(): # TODO: Overrides for an environment no longer makes sense. Need to # provide override for entire Region possibly. pipelines[region] = self.render_wrapper(region=region) previous_env = None for env in envs: block = construct_pipeline_block_s3( env=env, generated=self.generated, previous_env=previous_env, region=region, settings=self.settings[env][region], pipeline_data=self.settings['pipeline']) pipelines[region]['stages'].extend(json.loads(block)) previous_env = env self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines)) for region, pipeline in pipelines.items(): renumerate_stages(pipeline) self.post_pipeline(pipeline) return True
[ "def", "create_pipeline", "(", "self", ")", ":", "clean_pipelines", "(", "app", "=", "self", ".", "app_name", ",", "settings", "=", "self", ".", "settings", ")", "pipeline_envs", "=", "self", ".", "environments", "self", ".", "log", ".", "debug", "(", "'Envs from pipeline.json: %s'", ",", "pipeline_envs", ")", "regions_envs", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "env", "in", "pipeline_envs", ":", "for", "region", "in", "self", ".", "settings", "[", "env", "]", "[", "'regions'", "]", ":", "regions_envs", "[", "region", "]", ".", "append", "(", "env", ")", "self", ".", "log", ".", "info", "(", "'Environments and Regions for Pipelines:\\n%s'", ",", "json", ".", "dumps", "(", "regions_envs", ",", "indent", "=", "4", ")", ")", "pipelines", "=", "{", "}", "for", "region", ",", "envs", "in", "regions_envs", ".", "items", "(", ")", ":", "# TODO: Overrides for an environment no longer makes sense. Need to", "# provide override for entire Region possibly.", "pipelines", "[", "region", "]", "=", "self", ".", "render_wrapper", "(", "region", "=", "region", ")", "previous_env", "=", "None", "for", "env", "in", "envs", ":", "block", "=", "construct_pipeline_block_s3", "(", "env", "=", "env", ",", "generated", "=", "self", ".", "generated", ",", "previous_env", "=", "previous_env", ",", "region", "=", "region", ",", "settings", "=", "self", ".", "settings", "[", "env", "]", "[", "region", "]", ",", "pipeline_data", "=", "self", ".", "settings", "[", "'pipeline'", "]", ")", "pipelines", "[", "region", "]", "[", "'stages'", "]", ".", "extend", "(", "json", ".", "loads", "(", "block", ")", ")", "previous_env", "=", "env", "self", ".", "log", ".", "debug", "(", "'Assembled Pipelines:\\n%s'", ",", "pformat", "(", "pipelines", ")", ")", "for", "region", ",", "pipeline", "in", "pipelines", ".", "items", "(", ")", ":", "renumerate_stages", "(", "pipeline", ")", "self", ".", "post_pipeline", "(", "pipeline", ")", "return", "True" ]
Main wrapper for pipeline creation. 1. Runs clean_pipelines to clean up existing ones 2. determines which environments the pipeline needs 3. Renders all of the pipeline blocks as defined in configs 4. Runs post_pipeline to create pipeline
[ "Main", "wrapper", "for", "pipeline", "creation", ".", "1", ".", "Runs", "clean_pipelines", "to", "clean", "up", "existing", "ones", "2", ".", "determines", "which", "environments", "the", "pipeline", "needs", "3", ".", "Renders", "all", "of", "the", "pipeline", "blocks", "as", "defined", "in", "configs", "4", ".", "Runs", "post_pipeline", "to", "create", "pipeline" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline_s3.py#L84-L129
train
foremast/foremast
src/foremast/awslambda/awslambda.py
LambdaFunction._check_lambda
def _check_lambda(self): """Check if lambda function exists. Returns: True if function does exist False if function does not exist """ exists = False try: self.lambda_client.get_function(FunctionName=self.app_name) exists = True except boto3.exceptions.botocore.exceptions.ClientError: pass return exists
python
def _check_lambda(self): """Check if lambda function exists. Returns: True if function does exist False if function does not exist """ exists = False try: self.lambda_client.get_function(FunctionName=self.app_name) exists = True except boto3.exceptions.botocore.exceptions.ClientError: pass return exists
[ "def", "_check_lambda", "(", "self", ")", ":", "exists", "=", "False", "try", ":", "self", ".", "lambda_client", ".", "get_function", "(", "FunctionName", "=", "self", ".", "app_name", ")", "exists", "=", "True", "except", "boto3", ".", "exceptions", ".", "botocore", ".", "exceptions", ".", "ClientError", ":", "pass", "return", "exists" ]
Check if lambda function exists. Returns: True if function does exist False if function does not exist
[ "Check", "if", "lambda", "function", "exists", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L72-L85
train
foremast/foremast
src/foremast/awslambda/awslambda.py
LambdaFunction._check_lambda_alias
def _check_lambda_alias(self): """Check if lambda alias exists. Returns: True if alias exists False if alias does not exist """ aliases = self.lambda_client.list_aliases(FunctionName=self.app_name) matched_alias = False for alias in aliases['Aliases']: if alias['Name'] == self.env: LOG.info('Found alias %s for function %s', self.env, self.app_name) matched_alias = True break else: LOG.info('No alias %s found for function %s', self.env, self.app_name) return matched_alias
python
def _check_lambda_alias(self): """Check if lambda alias exists. Returns: True if alias exists False if alias does not exist """ aliases = self.lambda_client.list_aliases(FunctionName=self.app_name) matched_alias = False for alias in aliases['Aliases']: if alias['Name'] == self.env: LOG.info('Found alias %s for function %s', self.env, self.app_name) matched_alias = True break else: LOG.info('No alias %s found for function %s', self.env, self.app_name) return matched_alias
[ "def", "_check_lambda_alias", "(", "self", ")", ":", "aliases", "=", "self", ".", "lambda_client", ".", "list_aliases", "(", "FunctionName", "=", "self", ".", "app_name", ")", "matched_alias", "=", "False", "for", "alias", "in", "aliases", "[", "'Aliases'", "]", ":", "if", "alias", "[", "'Name'", "]", "==", "self", ".", "env", ":", "LOG", ".", "info", "(", "'Found alias %s for function %s'", ",", "self", ".", "env", ",", "self", ".", "app_name", ")", "matched_alias", "=", "True", "break", "else", ":", "LOG", ".", "info", "(", "'No alias %s found for function %s'", ",", "self", ".", "env", ",", "self", ".", "app_name", ")", "return", "matched_alias" ]
Check if lambda alias exists. Returns: True if alias exists False if alias does not exist
[ "Check", "if", "lambda", "alias", "exists", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L87-L104
train
foremast/foremast
src/foremast/awslambda/awslambda.py
LambdaFunction._vpc_config
def _vpc_config(self): """Get VPC config.""" if self.vpc_enabled: subnets = get_subnets(env=self.env, region=self.region, purpose='internal')['subnet_ids'][self.region] security_groups = self._get_sg_ids() vpc_config = {'SubnetIds': subnets, 'SecurityGroupIds': security_groups} else: vpc_config = {'SubnetIds': [], 'SecurityGroupIds': []} LOG.debug("Lambda VPC config setup: %s", vpc_config) return vpc_config
python
def _vpc_config(self): """Get VPC config.""" if self.vpc_enabled: subnets = get_subnets(env=self.env, region=self.region, purpose='internal')['subnet_ids'][self.region] security_groups = self._get_sg_ids() vpc_config = {'SubnetIds': subnets, 'SecurityGroupIds': security_groups} else: vpc_config = {'SubnetIds': [], 'SecurityGroupIds': []} LOG.debug("Lambda VPC config setup: %s", vpc_config) return vpc_config
[ "def", "_vpc_config", "(", "self", ")", ":", "if", "self", ".", "vpc_enabled", ":", "subnets", "=", "get_subnets", "(", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "purpose", "=", "'internal'", ")", "[", "'subnet_ids'", "]", "[", "self", ".", "region", "]", "security_groups", "=", "self", ".", "_get_sg_ids", "(", ")", "vpc_config", "=", "{", "'SubnetIds'", ":", "subnets", ",", "'SecurityGroupIds'", ":", "security_groups", "}", "else", ":", "vpc_config", "=", "{", "'SubnetIds'", ":", "[", "]", ",", "'SecurityGroupIds'", ":", "[", "]", "}", "LOG", ".", "debug", "(", "\"Lambda VPC config setup: %s\"", ",", "vpc_config", ")", "return", "vpc_config" ]
Get VPC config.
[ "Get", "VPC", "config", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L106-L116
train
foremast/foremast
src/foremast/awslambda/awslambda.py
LambdaFunction._get_sg_ids
def _get_sg_ids(self): """Get IDs for all defined security groups. Returns: list: security group IDs for all lambda_extras """ try: lambda_extras = self.settings['security_groups']['lambda_extras'] except KeyError: lambda_extras = [] security_groups = [self.app_name] + lambda_extras sg_ids = [] for security_group in security_groups: sg_id = get_security_group_id(name=security_group, env=self.env, region=self.region) sg_ids.append(sg_id) return sg_ids
python
def _get_sg_ids(self): """Get IDs for all defined security groups. Returns: list: security group IDs for all lambda_extras """ try: lambda_extras = self.settings['security_groups']['lambda_extras'] except KeyError: lambda_extras = [] security_groups = [self.app_name] + lambda_extras sg_ids = [] for security_group in security_groups: sg_id = get_security_group_id(name=security_group, env=self.env, region=self.region) sg_ids.append(sg_id) return sg_ids
[ "def", "_get_sg_ids", "(", "self", ")", ":", "try", ":", "lambda_extras", "=", "self", ".", "settings", "[", "'security_groups'", "]", "[", "'lambda_extras'", "]", "except", "KeyError", ":", "lambda_extras", "=", "[", "]", "security_groups", "=", "[", "self", ".", "app_name", "]", "+", "lambda_extras", "sg_ids", "=", "[", "]", "for", "security_group", "in", "security_groups", ":", "sg_id", "=", "get_security_group_id", "(", "name", "=", "security_group", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ")", "sg_ids", ".", "append", "(", "sg_id", ")", "return", "sg_ids" ]
Get IDs for all defined security groups. Returns: list: security group IDs for all lambda_extras
[ "Get", "IDs", "for", "all", "defined", "security", "groups", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L118-L134
train
foremast/foremast
src/foremast/awslambda/awslambda.py
LambdaFunction.update_function_configuration
def update_function_configuration(self, vpc_config): """Update existing Lambda function configuration. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda """ LOG.info('Updating configuration for lambda function: %s', self.app_name) try: self.lambda_client.update_function_configuration( Environment=self.lambda_environment, FunctionName=self.app_name, Runtime=self.runtime, Role=self.role_arn, Handler=self.handler, Description=self.description, Timeout=int(self.timeout), MemorySize=int(self.memory), VpcConfig=vpc_config) if self.concurrency_limit: self.lambda_client.put_function_concurrency( FunctionName=self.app_name, ReservedConcurrentExecutions=self.concurrency_limit ) else: self.lambda_client.delete_function_concurrency(FunctionName=self.app_name) except boto3.exceptions.botocore.exceptions.ClientError as error: if 'CreateNetworkInterface' in error.response['Error']['Message']: message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn) LOG.debug(message) raise SystemExit(message) raise LOG.info('Updating Lambda function tags') lambda_arn = get_lambda_arn(self.app_name, self.env, self.region) self.lambda_client.tag_resource(Resource=lambda_arn, Tags={'app_group': self.group, 'app_name': self.app_name}) LOG.info("Successfully updated Lambda configuration.")
python
def update_function_configuration(self, vpc_config): """Update existing Lambda function configuration. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda """ LOG.info('Updating configuration for lambda function: %s', self.app_name) try: self.lambda_client.update_function_configuration( Environment=self.lambda_environment, FunctionName=self.app_name, Runtime=self.runtime, Role=self.role_arn, Handler=self.handler, Description=self.description, Timeout=int(self.timeout), MemorySize=int(self.memory), VpcConfig=vpc_config) if self.concurrency_limit: self.lambda_client.put_function_concurrency( FunctionName=self.app_name, ReservedConcurrentExecutions=self.concurrency_limit ) else: self.lambda_client.delete_function_concurrency(FunctionName=self.app_name) except boto3.exceptions.botocore.exceptions.ClientError as error: if 'CreateNetworkInterface' in error.response['Error']['Message']: message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn) LOG.debug(message) raise SystemExit(message) raise LOG.info('Updating Lambda function tags') lambda_arn = get_lambda_arn(self.app_name, self.env, self.region) self.lambda_client.tag_resource(Resource=lambda_arn, Tags={'app_group': self.group, 'app_name': self.app_name}) LOG.info("Successfully updated Lambda configuration.")
[ "def", "update_function_configuration", "(", "self", ",", "vpc_config", ")", ":", "LOG", ".", "info", "(", "'Updating configuration for lambda function: %s'", ",", "self", ".", "app_name", ")", "try", ":", "self", ".", "lambda_client", ".", "update_function_configuration", "(", "Environment", "=", "self", ".", "lambda_environment", ",", "FunctionName", "=", "self", ".", "app_name", ",", "Runtime", "=", "self", ".", "runtime", ",", "Role", "=", "self", ".", "role_arn", ",", "Handler", "=", "self", ".", "handler", ",", "Description", "=", "self", ".", "description", ",", "Timeout", "=", "int", "(", "self", ".", "timeout", ")", ",", "MemorySize", "=", "int", "(", "self", ".", "memory", ")", ",", "VpcConfig", "=", "vpc_config", ")", "if", "self", ".", "concurrency_limit", ":", "self", ".", "lambda_client", ".", "put_function_concurrency", "(", "FunctionName", "=", "self", ".", "app_name", ",", "ReservedConcurrentExecutions", "=", "self", ".", "concurrency_limit", ")", "else", ":", "self", ".", "lambda_client", ".", "delete_function_concurrency", "(", "FunctionName", "=", "self", ".", "app_name", ")", "except", "boto3", ".", "exceptions", ".", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "if", "'CreateNetworkInterface'", "in", "error", ".", "response", "[", "'Error'", "]", "[", "'Message'", "]", ":", "message", "=", "'{0} is missing \"ec2:CreateNetworkInterface\"'", ".", "format", "(", "self", ".", "role_arn", ")", "LOG", ".", "debug", "(", "message", ")", "raise", "SystemExit", "(", "message", ")", "raise", "LOG", ".", "info", "(", "'Updating Lambda function tags'", ")", "lambda_arn", "=", "get_lambda_arn", "(", "self", ".", "app_name", ",", "self", ".", "env", ",", "self", ".", "region", ")", "self", ".", "lambda_client", ".", "tag_resource", "(", "Resource", "=", "lambda_arn", ",", "Tags", "=", "{", "'app_group'", ":", "self", ".", "group", ",", "'app_name'", ":", "self", ".", "app_name", "}", ")", "LOG", ".", "info", "(", "\"Successfully 
updated Lambda configuration.\"", ")" ]
Update existing Lambda function configuration. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda
[ "Update", "existing", "Lambda", "function", "configuration", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L165-L206
train
foremast/foremast
src/foremast/awslambda/awslambda.py
LambdaFunction.create_function
def create_function(self, vpc_config): """Create lambda function, configures lambda parameters. We need to upload non-zero zip when creating function. Uploading hello_world python lambda function since AWS doesn't care which executable is in ZIP. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda """ zip_file = 'lambda-holder.zip' with zipfile.ZipFile(zip_file, mode='w') as zipped: zipped.writestr('index.py', 'print "Hello world"') contents = '' with open('lambda-holder.zip', 'rb') as openfile: contents = openfile.read() LOG.info('Creating lambda function: %s', self.app_name) try: self.lambda_client.create_function( Environment=self.lambda_environment, FunctionName=self.app_name, Runtime=self.runtime, Role=self.role_arn, Handler=self.handler, Code={'ZipFile': contents}, Description=self.description, Timeout=int(self.timeout), MemorySize=int(self.memory), Publish=False, VpcConfig=vpc_config, Tags={'app_group': self.group, 'app_name': self.app_name}) except boto3.exceptions.botocore.exceptions.ClientError as error: if 'CreateNetworkInterface' in error.response['Error']['Message']: message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn) LOG.critical(message) raise SystemExit(message) raise LOG.info("Successfully created Lambda function and alias")
python
def create_function(self, vpc_config): """Create lambda function, configures lambda parameters. We need to upload non-zero zip when creating function. Uploading hello_world python lambda function since AWS doesn't care which executable is in ZIP. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda """ zip_file = 'lambda-holder.zip' with zipfile.ZipFile(zip_file, mode='w') as zipped: zipped.writestr('index.py', 'print "Hello world"') contents = '' with open('lambda-holder.zip', 'rb') as openfile: contents = openfile.read() LOG.info('Creating lambda function: %s', self.app_name) try: self.lambda_client.create_function( Environment=self.lambda_environment, FunctionName=self.app_name, Runtime=self.runtime, Role=self.role_arn, Handler=self.handler, Code={'ZipFile': contents}, Description=self.description, Timeout=int(self.timeout), MemorySize=int(self.memory), Publish=False, VpcConfig=vpc_config, Tags={'app_group': self.group, 'app_name': self.app_name}) except boto3.exceptions.botocore.exceptions.ClientError as error: if 'CreateNetworkInterface' in error.response['Error']['Message']: message = '{0} is missing "ec2:CreateNetworkInterface"'.format(self.role_arn) LOG.critical(message) raise SystemExit(message) raise LOG.info("Successfully created Lambda function and alias")
[ "def", "create_function", "(", "self", ",", "vpc_config", ")", ":", "zip_file", "=", "'lambda-holder.zip'", "with", "zipfile", ".", "ZipFile", "(", "zip_file", ",", "mode", "=", "'w'", ")", "as", "zipped", ":", "zipped", ".", "writestr", "(", "'index.py'", ",", "'print \"Hello world\"'", ")", "contents", "=", "''", "with", "open", "(", "'lambda-holder.zip'", ",", "'rb'", ")", "as", "openfile", ":", "contents", "=", "openfile", ".", "read", "(", ")", "LOG", ".", "info", "(", "'Creating lambda function: %s'", ",", "self", ".", "app_name", ")", "try", ":", "self", ".", "lambda_client", ".", "create_function", "(", "Environment", "=", "self", ".", "lambda_environment", ",", "FunctionName", "=", "self", ".", "app_name", ",", "Runtime", "=", "self", ".", "runtime", ",", "Role", "=", "self", ".", "role_arn", ",", "Handler", "=", "self", ".", "handler", ",", "Code", "=", "{", "'ZipFile'", ":", "contents", "}", ",", "Description", "=", "self", ".", "description", ",", "Timeout", "=", "int", "(", "self", ".", "timeout", ")", ",", "MemorySize", "=", "int", "(", "self", ".", "memory", ")", ",", "Publish", "=", "False", ",", "VpcConfig", "=", "vpc_config", ",", "Tags", "=", "{", "'app_group'", ":", "self", ".", "group", ",", "'app_name'", ":", "self", ".", "app_name", "}", ")", "except", "boto3", ".", "exceptions", ".", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "if", "'CreateNetworkInterface'", "in", "error", ".", "response", "[", "'Error'", "]", "[", "'Message'", "]", ":", "message", "=", "'{0} is missing \"ec2:CreateNetworkInterface\"'", ".", "format", "(", "self", ".", "role_arn", ")", "LOG", ".", "critical", "(", "message", ")", "raise", "SystemExit", "(", "message", ")", "raise", "LOG", ".", "info", "(", "\"Successfully created Lambda function and alias\"", ")" ]
Create lambda function, configures lambda parameters. We need to upload non-zero zip when creating function. Uploading hello_world python lambda function since AWS doesn't care which executable is in ZIP. Args: vpc_config (dict): Dictionary of SubnetIds and SecurityGroupsIds for using a VPC in lambda
[ "Create", "lambda", "function", "configures", "lambda", "parameters", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L209-L253
train
foremast/foremast
src/foremast/awslambda/awslambda.py
LambdaFunction.create_lambda_function
def create_lambda_function(self): """Create or update Lambda function.""" vpc_config = self._vpc_config() if self._check_lambda(): self.update_function_configuration(vpc_config) else: self.create_function(vpc_config) if self._check_lambda_alias(): self.update_alias() else: self.create_alias()
python
def create_lambda_function(self): """Create or update Lambda function.""" vpc_config = self._vpc_config() if self._check_lambda(): self.update_function_configuration(vpc_config) else: self.create_function(vpc_config) if self._check_lambda_alias(): self.update_alias() else: self.create_alias()
[ "def", "create_lambda_function", "(", "self", ")", ":", "vpc_config", "=", "self", ".", "_vpc_config", "(", ")", "if", "self", ".", "_check_lambda", "(", ")", ":", "self", ".", "update_function_configuration", "(", "vpc_config", ")", "else", ":", "self", ".", "create_function", "(", "vpc_config", ")", "if", "self", ".", "_check_lambda_alias", "(", ")", ":", "self", ".", "update_alias", "(", ")", "else", ":", "self", ".", "create_alias", "(", ")" ]
Create or update Lambda function.
[ "Create", "or", "update", "Lambda", "function", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/awslambda.py#L255-L267
train
foremast/foremast
src/foremast/securitygroup/destroy_sg/destroy_sg.py
destroy_sg
def destroy_sg(app='', env='', region='', **_): """Destroy Security Group. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): Region name, e.g. us-east-1. Returns: True upon successful completion. """ vpc = get_vpc_id(account=env, region=region) url = '{api}/securityGroups/{env}/{region}/{app}'.format(api=API_URL, env=env, region=region, app=app) payload = {'vpcId': vpc} security_group = requests.get(url, params=payload, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not security_group: LOG.info('Nothing to delete.') else: LOG.info('Found Security Group in %(region)s: %(name)s', security_group) destroy_request = get_template('destroy/destroy_sg.json.j2', app=app, env=env, region=region, vpc=vpc) wait_for_task(destroy_request) return True
python
def destroy_sg(app='', env='', region='', **_): """Destroy Security Group. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): Region name, e.g. us-east-1. Returns: True upon successful completion. """ vpc = get_vpc_id(account=env, region=region) url = '{api}/securityGroups/{env}/{region}/{app}'.format(api=API_URL, env=env, region=region, app=app) payload = {'vpcId': vpc} security_group = requests.get(url, params=payload, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) if not security_group: LOG.info('Nothing to delete.') else: LOG.info('Found Security Group in %(region)s: %(name)s', security_group) destroy_request = get_template('destroy/destroy_sg.json.j2', app=app, env=env, region=region, vpc=vpc) wait_for_task(destroy_request) return True
[ "def", "destroy_sg", "(", "app", "=", "''", ",", "env", "=", "''", ",", "region", "=", "''", ",", "*", "*", "_", ")", ":", "vpc", "=", "get_vpc_id", "(", "account", "=", "env", ",", "region", "=", "region", ")", "url", "=", "'{api}/securityGroups/{env}/{region}/{app}'", ".", "format", "(", "api", "=", "API_URL", ",", "env", "=", "env", ",", "region", "=", "region", ",", "app", "=", "app", ")", "payload", "=", "{", "'vpcId'", ":", "vpc", "}", "security_group", "=", "requests", ".", "get", "(", "url", ",", "params", "=", "payload", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "if", "not", "security_group", ":", "LOG", ".", "info", "(", "'Nothing to delete.'", ")", "else", ":", "LOG", ".", "info", "(", "'Found Security Group in %(region)s: %(name)s'", ",", "security_group", ")", "destroy_request", "=", "get_template", "(", "'destroy/destroy_sg.json.j2'", ",", "app", "=", "app", ",", "env", "=", "env", ",", "region", "=", "region", ",", "vpc", "=", "vpc", ")", "wait_for_task", "(", "destroy_request", ")", "return", "True" ]
Destroy Security Group. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): Region name, e.g. us-east-1. Returns: True upon successful completion.
[ "Destroy", "Security", "Group", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/destroy_sg/destroy_sg.py#L27-L52
train
foremast/foremast
src/foremast/s3/destroy_s3/destroy_s3.py
destroy_s3
def destroy_s3(app='', env='dev', **_): """Destroy S3 Resources for _app_ in _env_. Args: app (str): Application name env (str): Deployment environment/account name Returns: boolean: True if destroyed sucessfully """ session = boto3.Session(profile_name=env) client = session.resource('s3') generated = get_details(app=app, env=env) archaius = generated.archaius() bucket = client.Bucket(archaius['bucket']) for item in bucket.objects.filter(Prefix=archaius['path']): item.Object().delete() LOG.info('Deleted: %s/%s', item.bucket_name, item.key) return True
python
def destroy_s3(app='', env='dev', **_): """Destroy S3 Resources for _app_ in _env_. Args: app (str): Application name env (str): Deployment environment/account name Returns: boolean: True if destroyed sucessfully """ session = boto3.Session(profile_name=env) client = session.resource('s3') generated = get_details(app=app, env=env) archaius = generated.archaius() bucket = client.Bucket(archaius['bucket']) for item in bucket.objects.filter(Prefix=archaius['path']): item.Object().delete() LOG.info('Deleted: %s/%s', item.bucket_name, item.key) return True
[ "def", "destroy_s3", "(", "app", "=", "''", ",", "env", "=", "'dev'", ",", "*", "*", "_", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", "client", "=", "session", ".", "resource", "(", "'s3'", ")", "generated", "=", "get_details", "(", "app", "=", "app", ",", "env", "=", "env", ")", "archaius", "=", "generated", ".", "archaius", "(", ")", "bucket", "=", "client", ".", "Bucket", "(", "archaius", "[", "'bucket'", "]", ")", "for", "item", "in", "bucket", ".", "objects", ".", "filter", "(", "Prefix", "=", "archaius", "[", "'path'", "]", ")", ":", "item", ".", "Object", "(", ")", ".", "delete", "(", ")", "LOG", ".", "info", "(", "'Deleted: %s/%s'", ",", "item", ".", "bucket_name", ",", "item", ".", "key", ")", "return", "True" ]
Destroy S3 Resources for _app_ in _env_. Args: app (str): Application name env (str): Deployment environment/account name Returns: boolean: True if destroyed sucessfully
[ "Destroy", "S3", "Resources", "for", "_app_", "in", "_env_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/destroy_s3/destroy_s3.py#L26-L48
train
foremast/foremast
src/foremast/app/__main__.py
main
def main(): """Entry point for creating a Spinnaker application.""" # Setup parser parser = argparse.ArgumentParser() add_debug(parser) add_app(parser) parser.add_argument( '--email', help='Email address to associate with application', default='PS-DevOpsTooling@example.com') parser.add_argument('--project', help='Git project to associate with application', default='None') parser.add_argument('--repo', help='Git repo to associate with application', default='None') parser.add_argument('--git', help='Git URI', default=None) args = parser.parse_args() logging.basicConfig(format=LOGGING_FORMAT) logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) if args.git and args.git != 'None': parsed = gogoutils.Parser(args.git).parse_url() generated = gogoutils.Generator(*parsed, formats=APP_FORMATS) project = generated.project repo = generated.repo else: project = args.project repo = args.repo spinnakerapps = SpinnakerApp(app=args.app, email=args.email, project=project, repo=repo) spinnakerapps.create_app()
python
def main(): """Entry point for creating a Spinnaker application.""" # Setup parser parser = argparse.ArgumentParser() add_debug(parser) add_app(parser) parser.add_argument( '--email', help='Email address to associate with application', default='PS-DevOpsTooling@example.com') parser.add_argument('--project', help='Git project to associate with application', default='None') parser.add_argument('--repo', help='Git repo to associate with application', default='None') parser.add_argument('--git', help='Git URI', default=None) args = parser.parse_args() logging.basicConfig(format=LOGGING_FORMAT) logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) if args.git and args.git != 'None': parsed = gogoutils.Parser(args.git).parse_url() generated = gogoutils.Generator(*parsed, formats=APP_FORMATS) project = generated.project repo = generated.repo else: project = args.project repo = args.repo spinnakerapps = SpinnakerApp(app=args.app, email=args.email, project=project, repo=repo) spinnakerapps.create_app()
[ "def", "main", "(", ")", ":", "# Setup parser", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "parser", ".", "add_argument", "(", "'--email'", ",", "help", "=", "'Email address to associate with application'", ",", "default", "=", "'PS-DevOpsTooling@example.com'", ")", "parser", ".", "add_argument", "(", "'--project'", ",", "help", "=", "'Git project to associate with application'", ",", "default", "=", "'None'", ")", "parser", ".", "add_argument", "(", "'--repo'", ",", "help", "=", "'Git repo to associate with application'", ",", "default", "=", "'None'", ")", "parser", ".", "add_argument", "(", "'--git'", ",", "help", "=", "'Git URI'", ",", "default", "=", "None", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "if", "args", ".", "git", "and", "args", ".", "git", "!=", "'None'", ":", "parsed", "=", "gogoutils", ".", "Parser", "(", "args", ".", "git", ")", ".", "parse_url", "(", ")", "generated", "=", "gogoutils", ".", "Generator", "(", "*", "parsed", ",", "formats", "=", "APP_FORMATS", ")", "project", "=", "generated", ".", "project", "repo", "=", "generated", ".", "repo", "else", ":", "project", "=", "args", ".", "project", "repo", "=", "args", ".", "repo", "spinnakerapps", "=", "SpinnakerApp", "(", "app", "=", "args", ".", "app", ",", "email", "=", "args", ".", "email", ",", "project", "=", "project", ",", "repo", "=", "repo", ")", "spinnakerapps", ".", "create_app", "(", ")" ]
Entry point for creating a Spinnaker application.
[ "Entry", "point", "for", "creating", "a", "Spinnaker", "application", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/app/__main__.py#L30-L56
train
foremast/foremast
src/foremast/awslambda/s3_event/destroy_s3_event/destroy_s3_event.py
destroy_s3_event
def destroy_s3_event(app, env, region): """Destroy S3 event. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: bool: True upon successful completion. """ # TODO: how do we know which bucket to process if triggers dict is empty? # Maybe list buckets and see which has notification to that lambda defined? # TODO: buckets should be named the same as apps, what if one app has multiple buckets? # bucket = rules.get('bucket') generated = get_details(app=app, env=env) bucket = generated.s3_app_bucket() session = boto3.Session(profile_name=env, region_name=region) s3_client = session.client('s3') config = {} s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=config) LOG.debug("Deleted Lambda S3 notification") return True
python
def destroy_s3_event(app, env, region): """Destroy S3 event. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: bool: True upon successful completion. """ # TODO: how do we know which bucket to process if triggers dict is empty? # Maybe list buckets and see which has notification to that lambda defined? # TODO: buckets should be named the same as apps, what if one app has multiple buckets? # bucket = rules.get('bucket') generated = get_details(app=app, env=env) bucket = generated.s3_app_bucket() session = boto3.Session(profile_name=env, region_name=region) s3_client = session.client('s3') config = {} s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=config) LOG.debug("Deleted Lambda S3 notification") return True
[ "def", "destroy_s3_event", "(", "app", ",", "env", ",", "region", ")", ":", "# TODO: how do we know which bucket to process if triggers dict is empty?", "# Maybe list buckets and see which has notification to that lambda defined?", "# TODO: buckets should be named the same as apps, what if one app has multiple buckets?", "# bucket = rules.get('bucket')", "generated", "=", "get_details", "(", "app", "=", "app", ",", "env", "=", "env", ")", "bucket", "=", "generated", ".", "s3_app_bucket", "(", ")", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "s3_client", "=", "session", ".", "client", "(", "'s3'", ")", "config", "=", "{", "}", "s3_client", ".", "put_bucket_notification_configuration", "(", "Bucket", "=", "bucket", ",", "NotificationConfiguration", "=", "config", ")", "LOG", ".", "debug", "(", "\"Deleted Lambda S3 notification\"", ")", "return", "True" ]
Destroy S3 event. Args: app (str): Spinnaker Application name. env (str): Deployment environment. region (str): AWS region. Returns: bool: True upon successful completion.
[ "Destroy", "S3", "event", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/s3_event/destroy_s3_event/destroy_s3_event.py#L26-L53
train
foremast/foremast
src/foremast/iam/destroy_iam/destroy_iam.py
destroy_iam
def destroy_iam(app='', env='dev', **_): """Destroy IAM Resources. Args: app (str): Spinnaker Application name. env (str): Deployment environment, i.e. dev, stage, prod. Returns: True upon successful completion. """ session = boto3.Session(profile_name=env) client = session.client('iam') generated = get_details(env=env, app=app) generated_iam = generated.iam() app_details = collections.namedtuple('AppDetails', generated_iam.keys()) details = app_details(**generated_iam) LOG.debug('Application details: %s', details) resource_action( client, action='remove_user_from_group', log_format='Removed user from group: %(UserName)s ~> %(GroupName)s', GroupName=details.group, UserName=details.user) resource_action(client, action='delete_user', log_format='Destroyed user: %(UserName)s', UserName=details.user) resource_action(client, action='delete_group', log_format='Destroyed group: %(GroupName)s', GroupName=details.group) resource_action( client, action='remove_role_from_instance_profile', log_format='Destroyed Instance Profile from Role: ' '%(InstanceProfileName)s ~> %(RoleName)s', InstanceProfileName=details.profile, RoleName=details.role) resource_action( client, action='delete_instance_profile', log_format='Destroyed Instance Profile: %(InstanceProfileName)s', InstanceProfileName=details.profile) role_policies = [] try: role_policies = resource_action( client, action='list_role_policies', log_format='Found Role Policies for %(RoleName)s.', RoleName=details.role)['PolicyNames'] except TypeError: LOG.info('Role %s not found.', details.role) for policy in role_policies: resource_action( client, action='delete_role_policy', log_format='Removed Inline Policy from Role: ' '%(PolicyName)s ~> %(RoleName)s', RoleName=details.role, PolicyName=policy) attached_role_policies = [] try: attached_role_policies = resource_action( client, action='list_attached_role_policies', log_format='Found attached Role Polices for %(RoleName)s.', RoleName=details.role)['AttachedPolicies'] except 
TypeError: LOG.info('Role %s not found.', details.role) for policy in attached_role_policies: resource_action( client, action='detach_role_policy', log_format='Detached Policy from Role: ' '%(PolicyArn)s ~> %(RoleName)s', RoleName=details.role, PolicyArn=policy['PolicyArn']) resource_action(client, action='delete_role', log_format='Destroyed Role: %(RoleName)s', RoleName=details.role)
python
def destroy_iam(app='', env='dev', **_): """Destroy IAM Resources. Args: app (str): Spinnaker Application name. env (str): Deployment environment, i.e. dev, stage, prod. Returns: True upon successful completion. """ session = boto3.Session(profile_name=env) client = session.client('iam') generated = get_details(env=env, app=app) generated_iam = generated.iam() app_details = collections.namedtuple('AppDetails', generated_iam.keys()) details = app_details(**generated_iam) LOG.debug('Application details: %s', details) resource_action( client, action='remove_user_from_group', log_format='Removed user from group: %(UserName)s ~> %(GroupName)s', GroupName=details.group, UserName=details.user) resource_action(client, action='delete_user', log_format='Destroyed user: %(UserName)s', UserName=details.user) resource_action(client, action='delete_group', log_format='Destroyed group: %(GroupName)s', GroupName=details.group) resource_action( client, action='remove_role_from_instance_profile', log_format='Destroyed Instance Profile from Role: ' '%(InstanceProfileName)s ~> %(RoleName)s', InstanceProfileName=details.profile, RoleName=details.role) resource_action( client, action='delete_instance_profile', log_format='Destroyed Instance Profile: %(InstanceProfileName)s', InstanceProfileName=details.profile) role_policies = [] try: role_policies = resource_action( client, action='list_role_policies', log_format='Found Role Policies for %(RoleName)s.', RoleName=details.role)['PolicyNames'] except TypeError: LOG.info('Role %s not found.', details.role) for policy in role_policies: resource_action( client, action='delete_role_policy', log_format='Removed Inline Policy from Role: ' '%(PolicyName)s ~> %(RoleName)s', RoleName=details.role, PolicyName=policy) attached_role_policies = [] try: attached_role_policies = resource_action( client, action='list_attached_role_policies', log_format='Found attached Role Polices for %(RoleName)s.', RoleName=details.role)['AttachedPolicies'] except 
TypeError: LOG.info('Role %s not found.', details.role) for policy in attached_role_policies: resource_action( client, action='detach_role_policy', log_format='Detached Policy from Role: ' '%(PolicyArn)s ~> %(RoleName)s', RoleName=details.role, PolicyArn=policy['PolicyArn']) resource_action(client, action='delete_role', log_format='Destroyed Role: %(RoleName)s', RoleName=details.role)
[ "def", "destroy_iam", "(", "app", "=", "''", ",", "env", "=", "'dev'", ",", "*", "*", "_", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ")", "client", "=", "session", ".", "client", "(", "'iam'", ")", "generated", "=", "get_details", "(", "env", "=", "env", ",", "app", "=", "app", ")", "generated_iam", "=", "generated", ".", "iam", "(", ")", "app_details", "=", "collections", ".", "namedtuple", "(", "'AppDetails'", ",", "generated_iam", ".", "keys", "(", ")", ")", "details", "=", "app_details", "(", "*", "*", "generated_iam", ")", "LOG", ".", "debug", "(", "'Application details: %s'", ",", "details", ")", "resource_action", "(", "client", ",", "action", "=", "'remove_user_from_group'", ",", "log_format", "=", "'Removed user from group: %(UserName)s ~> %(GroupName)s'", ",", "GroupName", "=", "details", ".", "group", ",", "UserName", "=", "details", ".", "user", ")", "resource_action", "(", "client", ",", "action", "=", "'delete_user'", ",", "log_format", "=", "'Destroyed user: %(UserName)s'", ",", "UserName", "=", "details", ".", "user", ")", "resource_action", "(", "client", ",", "action", "=", "'delete_group'", ",", "log_format", "=", "'Destroyed group: %(GroupName)s'", ",", "GroupName", "=", "details", ".", "group", ")", "resource_action", "(", "client", ",", "action", "=", "'remove_role_from_instance_profile'", ",", "log_format", "=", "'Destroyed Instance Profile from Role: '", "'%(InstanceProfileName)s ~> %(RoleName)s'", ",", "InstanceProfileName", "=", "details", ".", "profile", ",", "RoleName", "=", "details", ".", "role", ")", "resource_action", "(", "client", ",", "action", "=", "'delete_instance_profile'", ",", "log_format", "=", "'Destroyed Instance Profile: %(InstanceProfileName)s'", ",", "InstanceProfileName", "=", "details", ".", "profile", ")", "role_policies", "=", "[", "]", "try", ":", "role_policies", "=", "resource_action", "(", "client", ",", "action", "=", "'list_role_policies'", ",", "log_format", "=", 
"'Found Role Policies for %(RoleName)s.'", ",", "RoleName", "=", "details", ".", "role", ")", "[", "'PolicyNames'", "]", "except", "TypeError", ":", "LOG", ".", "info", "(", "'Role %s not found.'", ",", "details", ".", "role", ")", "for", "policy", "in", "role_policies", ":", "resource_action", "(", "client", ",", "action", "=", "'delete_role_policy'", ",", "log_format", "=", "'Removed Inline Policy from Role: '", "'%(PolicyName)s ~> %(RoleName)s'", ",", "RoleName", "=", "details", ".", "role", ",", "PolicyName", "=", "policy", ")", "attached_role_policies", "=", "[", "]", "try", ":", "attached_role_policies", "=", "resource_action", "(", "client", ",", "action", "=", "'list_attached_role_policies'", ",", "log_format", "=", "'Found attached Role Polices for %(RoleName)s.'", ",", "RoleName", "=", "details", ".", "role", ")", "[", "'AttachedPolicies'", "]", "except", "TypeError", ":", "LOG", ".", "info", "(", "'Role %s not found.'", ",", "details", ".", "role", ")", "for", "policy", "in", "attached_role_policies", ":", "resource_action", "(", "client", ",", "action", "=", "'detach_role_policy'", ",", "log_format", "=", "'Detached Policy from Role: '", "'%(PolicyArn)s ~> %(RoleName)s'", ",", "RoleName", "=", "details", ".", "role", ",", "PolicyArn", "=", "policy", "[", "'PolicyArn'", "]", ")", "resource_action", "(", "client", ",", "action", "=", "'delete_role'", ",", "log_format", "=", "'Destroyed Role: %(RoleName)s'", ",", "RoleName", "=", "details", ".", "role", ")" ]
Destroy IAM Resources. Args: app (str): Spinnaker Application name. env (str): Deployment environment, i.e. dev, stage, prod. Returns: True upon successful completion.
[ "Destroy", "IAM", "Resources", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/destroy_iam/destroy_iam.py#L28-L108
train
foremast/foremast
src/foremast/utils/roles.py
get_role_arn
def get_role_arn(role_name, env, region): """Get role ARN given role name. Args: role_name (str): Role name to lookup env (str): Environment in which to lookup region (str): Region Returns: ARN if role found """ session = boto3.Session(profile_name=env, region_name=region) iam_client = session.client('iam') LOG.debug('Searching for %s.', role_name) role = iam_client.get_role(RoleName=role_name) role_arn = role['Role']['Arn'] LOG.debug("Found role's %s ARN %s", role_name, role_arn) return role_arn
python
def get_role_arn(role_name, env, region): """Get role ARN given role name. Args: role_name (str): Role name to lookup env (str): Environment in which to lookup region (str): Region Returns: ARN if role found """ session = boto3.Session(profile_name=env, region_name=region) iam_client = session.client('iam') LOG.debug('Searching for %s.', role_name) role = iam_client.get_role(RoleName=role_name) role_arn = role['Role']['Arn'] LOG.debug("Found role's %s ARN %s", role_name, role_arn) return role_arn
[ "def", "get_role_arn", "(", "role_name", ",", "env", ",", "region", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "iam_client", "=", "session", ".", "client", "(", "'iam'", ")", "LOG", ".", "debug", "(", "'Searching for %s.'", ",", "role_name", ")", "role", "=", "iam_client", ".", "get_role", "(", "RoleName", "=", "role_name", ")", "role_arn", "=", "role", "[", "'Role'", "]", "[", "'Arn'", "]", "LOG", ".", "debug", "(", "\"Found role's %s ARN %s\"", ",", "role_name", ",", "role_arn", ")", "return", "role_arn" ]
Get role ARN given role name. Args: role_name (str): Role name to lookup env (str): Environment in which to lookup region (str): Region Returns: ARN if role found
[ "Get", "role", "ARN", "given", "role", "name", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/roles.py#L9-L31
train
foremast/foremast
src/foremast/iam/construct_policy.py
render_policy_template
def render_policy_template( # pylint: disable=too-many-arguments account_number='', app='coreforrest', env='dev', group='forrest', items=None, pipeline_settings=None, region='us-east-1', service=''): """Render IAM Policy template. To support multiple statement blocks, JSON objects can be separated by a comma. This function attempts to turn any invalid JSON into a valid list based on this comma separated assumption. Args: account_number (str): AWS Account number. app (str): Name of Spinnaker Application. env (str): Environment/Account in AWS group (str):A Application group/namespace items (list): Resource names used to create a Policy per Resource. region (str): AWS region. pipeline_settings (dict): Settings from *pipeline.json*. service (str): Name of cloud service to find matching IAM Policy template. Returns: list: IAM Policy :obj:`dict` statements for the given service. """ statements = [] rendered_service_policy = get_template( 'infrastructure/iam/{0}.json.j2'.format(service), account_number=account_number, app=app, env=env, group=group, region=region, items=items, settings=pipeline_settings) try: statement_block = json.loads(rendered_service_policy) statements.append(statement_block) except ValueError: LOG.debug('Need to make %s template into list.', service) statements = json.loads('[{0}]'.format(rendered_service_policy)) LOG.debug('Rendered IAM Policy statements: %s', statements) return statements
python
def render_policy_template( # pylint: disable=too-many-arguments account_number='', app='coreforrest', env='dev', group='forrest', items=None, pipeline_settings=None, region='us-east-1', service=''): """Render IAM Policy template. To support multiple statement blocks, JSON objects can be separated by a comma. This function attempts to turn any invalid JSON into a valid list based on this comma separated assumption. Args: account_number (str): AWS Account number. app (str): Name of Spinnaker Application. env (str): Environment/Account in AWS group (str):A Application group/namespace items (list): Resource names used to create a Policy per Resource. region (str): AWS region. pipeline_settings (dict): Settings from *pipeline.json*. service (str): Name of cloud service to find matching IAM Policy template. Returns: list: IAM Policy :obj:`dict` statements for the given service. """ statements = [] rendered_service_policy = get_template( 'infrastructure/iam/{0}.json.j2'.format(service), account_number=account_number, app=app, env=env, group=group, region=region, items=items, settings=pipeline_settings) try: statement_block = json.loads(rendered_service_policy) statements.append(statement_block) except ValueError: LOG.debug('Need to make %s template into list.', service) statements = json.loads('[{0}]'.format(rendered_service_policy)) LOG.debug('Rendered IAM Policy statements: %s', statements) return statements
[ "def", "render_policy_template", "(", "# pylint: disable=too-many-arguments", "account_number", "=", "''", ",", "app", "=", "'coreforrest'", ",", "env", "=", "'dev'", ",", "group", "=", "'forrest'", ",", "items", "=", "None", ",", "pipeline_settings", "=", "None", ",", "region", "=", "'us-east-1'", ",", "service", "=", "''", ")", ":", "statements", "=", "[", "]", "rendered_service_policy", "=", "get_template", "(", "'infrastructure/iam/{0}.json.j2'", ".", "format", "(", "service", ")", ",", "account_number", "=", "account_number", ",", "app", "=", "app", ",", "env", "=", "env", ",", "group", "=", "group", ",", "region", "=", "region", ",", "items", "=", "items", ",", "settings", "=", "pipeline_settings", ")", "try", ":", "statement_block", "=", "json", ".", "loads", "(", "rendered_service_policy", ")", "statements", ".", "append", "(", "statement_block", ")", "except", "ValueError", ":", "LOG", ".", "debug", "(", "'Need to make %s template into list.'", ",", "service", ")", "statements", "=", "json", ".", "loads", "(", "'[{0}]'", ".", "format", "(", "rendered_service_policy", ")", ")", "LOG", ".", "debug", "(", "'Rendered IAM Policy statements: %s'", ",", "statements", ")", "return", "statements" ]
Render IAM Policy template. To support multiple statement blocks, JSON objects can be separated by a comma. This function attempts to turn any invalid JSON into a valid list based on this comma separated assumption. Args: account_number (str): AWS Account number. app (str): Name of Spinnaker Application. env (str): Environment/Account in AWS group (str):A Application group/namespace items (list): Resource names used to create a Policy per Resource. region (str): AWS region. pipeline_settings (dict): Settings from *pipeline.json*. service (str): Name of cloud service to find matching IAM Policy template. Returns: list: IAM Policy :obj:`dict` statements for the given service.
[ "Render", "IAM", "Policy", "template", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/construct_policy.py#L57-L108
train
foremast/foremast
src/foremast/iam/construct_policy.py
construct_policy
def construct_policy(app='coreforrest', env='dev', group='forrest', region='us-east-1', pipeline_settings=None): """Assemble IAM Policy for _app_. Args: app (str): Name of Spinnaker Application. env (str): Environment/Account in AWS group (str):A Application group/namespace region (str): AWS region pipeline_settings (dict): Settings from *pipeline.json*. Returns: json: Custom IAM Policy for _app_. None: When no *services* have been defined in *pipeline.json*. """ LOG.info('Create custom IAM Policy for %s.', app) services = pipeline_settings.get('services', {}) LOG.debug('Found requested services: %s', services) services = auto_service(pipeline_settings=pipeline_settings, services=services) if services: credential = get_env_credential(env=env) account_number = credential['accountId'] statements = [] for service, value in services.items(): if value is True: items = [] elif isinstance(value, str): items = [value] else: items = value rendered_statements = render_policy_template( account_number=account_number, app=app, env=env, group=group, items=items, pipeline_settings=pipeline_settings, region=region, service=service) statements.extend(rendered_statements) if statements: policy_json = get_template('infrastructure/iam/wrapper.json.j2', statements=json.dumps(statements)) else: LOG.info('No services defined for %s.', app) policy_json = None return policy_json
python
def construct_policy(app='coreforrest', env='dev', group='forrest', region='us-east-1', pipeline_settings=None): """Assemble IAM Policy for _app_. Args: app (str): Name of Spinnaker Application. env (str): Environment/Account in AWS group (str):A Application group/namespace region (str): AWS region pipeline_settings (dict): Settings from *pipeline.json*. Returns: json: Custom IAM Policy for _app_. None: When no *services* have been defined in *pipeline.json*. """ LOG.info('Create custom IAM Policy for %s.', app) services = pipeline_settings.get('services', {}) LOG.debug('Found requested services: %s', services) services = auto_service(pipeline_settings=pipeline_settings, services=services) if services: credential = get_env_credential(env=env) account_number = credential['accountId'] statements = [] for service, value in services.items(): if value is True: items = [] elif isinstance(value, str): items = [value] else: items = value rendered_statements = render_policy_template( account_number=account_number, app=app, env=env, group=group, items=items, pipeline_settings=pipeline_settings, region=region, service=service) statements.extend(rendered_statements) if statements: policy_json = get_template('infrastructure/iam/wrapper.json.j2', statements=json.dumps(statements)) else: LOG.info('No services defined for %s.', app) policy_json = None return policy_json
[ "def", "construct_policy", "(", "app", "=", "'coreforrest'", ",", "env", "=", "'dev'", ",", "group", "=", "'forrest'", ",", "region", "=", "'us-east-1'", ",", "pipeline_settings", "=", "None", ")", ":", "LOG", ".", "info", "(", "'Create custom IAM Policy for %s.'", ",", "app", ")", "services", "=", "pipeline_settings", ".", "get", "(", "'services'", ",", "{", "}", ")", "LOG", ".", "debug", "(", "'Found requested services: %s'", ",", "services", ")", "services", "=", "auto_service", "(", "pipeline_settings", "=", "pipeline_settings", ",", "services", "=", "services", ")", "if", "services", ":", "credential", "=", "get_env_credential", "(", "env", "=", "env", ")", "account_number", "=", "credential", "[", "'accountId'", "]", "statements", "=", "[", "]", "for", "service", ",", "value", "in", "services", ".", "items", "(", ")", ":", "if", "value", "is", "True", ":", "items", "=", "[", "]", "elif", "isinstance", "(", "value", ",", "str", ")", ":", "items", "=", "[", "value", "]", "else", ":", "items", "=", "value", "rendered_statements", "=", "render_policy_template", "(", "account_number", "=", "account_number", ",", "app", "=", "app", ",", "env", "=", "env", ",", "group", "=", "group", ",", "items", "=", "items", ",", "pipeline_settings", "=", "pipeline_settings", ",", "region", "=", "region", ",", "service", "=", "service", ")", "statements", ".", "extend", "(", "rendered_statements", ")", "if", "statements", ":", "policy_json", "=", "get_template", "(", "'infrastructure/iam/wrapper.json.j2'", ",", "statements", "=", "json", ".", "dumps", "(", "statements", ")", ")", "else", ":", "LOG", ".", "info", "(", "'No services defined for %s.'", ",", "app", ")", "policy_json", "=", "None", "return", "policy_json" ]
Assemble IAM Policy for _app_. Args: app (str): Name of Spinnaker Application. env (str): Environment/Account in AWS group (str):A Application group/namespace region (str): AWS region pipeline_settings (dict): Settings from *pipeline.json*. Returns: json: Custom IAM Policy for _app_. None: When no *services* have been defined in *pipeline.json*.
[ "Assemble", "IAM", "Policy", "for", "_app_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/construct_policy.py#L111-L163
train
foremast/foremast
src/foremast/validate.py
validate_gate
def validate_gate(): """Check Gate connection.""" try: credentials = get_env_credential() LOG.debug('Found credentials: %s', credentials) LOG.info('Gate working.') except TypeError: LOG.fatal('Gate connection not valid: API_URL = %s', API_URL)
python
def validate_gate(): """Check Gate connection.""" try: credentials = get_env_credential() LOG.debug('Found credentials: %s', credentials) LOG.info('Gate working.') except TypeError: LOG.fatal('Gate connection not valid: API_URL = %s', API_URL)
[ "def", "validate_gate", "(", ")", ":", "try", ":", "credentials", "=", "get_env_credential", "(", ")", "LOG", ".", "debug", "(", "'Found credentials: %s'", ",", "credentials", ")", "LOG", ".", "info", "(", "'Gate working.'", ")", "except", "TypeError", ":", "LOG", ".", "fatal", "(", "'Gate connection not valid: API_URL = %s'", ",", "API_URL", ")" ]
Check Gate connection.
[ "Check", "Gate", "connection", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/validate.py#L10-L17
train
foremast/foremast
src/foremast/awslambda/s3_event/s3_event.py
create_s3_event
def create_s3_event(app_name, env, region, bucket, triggers): """Create S3 lambda events from triggers Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function triggers (list): List of triggers from the settings """ session = boto3.Session(profile_name=env, region_name=region) s3_client = session.client('s3') lambda_alias_arn = get_lambda_alias_arn(app_name, env, region) LOG.debug("Lambda ARN for lambda function %s is %s.", app_name, lambda_alias_arn) LOG.debug("Creating S3 events for bucket %s", bucket) # allow lambda trigger permission from bucket principal = 's3.amazonaws.com' statement_id = "{}_s3_{}".format(app_name, bucket).replace('.', '') source_arn = "arn:aws:s3:::{}".format(bucket) add_lambda_permissions( function=lambda_alias_arn, env=env, region=region, principal=principal, statement_id=statement_id, source_arn=source_arn) # configure events on s3 bucket to trigger lambda function template_kwargs = {"lambda_arn": lambda_alias_arn, "triggers": triggers} config = get_template(template_file='infrastructure/lambda/s3_event.json.j2', **template_kwargs) s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=json.loads(config)) LOG.info("Created lambda %s S3 event on bucket %s", app_name, bucket)
python
def create_s3_event(app_name, env, region, bucket, triggers): """Create S3 lambda events from triggers Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function triggers (list): List of triggers from the settings """ session = boto3.Session(profile_name=env, region_name=region) s3_client = session.client('s3') lambda_alias_arn = get_lambda_alias_arn(app_name, env, region) LOG.debug("Lambda ARN for lambda function %s is %s.", app_name, lambda_alias_arn) LOG.debug("Creating S3 events for bucket %s", bucket) # allow lambda trigger permission from bucket principal = 's3.amazonaws.com' statement_id = "{}_s3_{}".format(app_name, bucket).replace('.', '') source_arn = "arn:aws:s3:::{}".format(bucket) add_lambda_permissions( function=lambda_alias_arn, env=env, region=region, principal=principal, statement_id=statement_id, source_arn=source_arn) # configure events on s3 bucket to trigger lambda function template_kwargs = {"lambda_arn": lambda_alias_arn, "triggers": triggers} config = get_template(template_file='infrastructure/lambda/s3_event.json.j2', **template_kwargs) s3_client.put_bucket_notification_configuration(Bucket=bucket, NotificationConfiguration=json.loads(config)) LOG.info("Created lambda %s S3 event on bucket %s", app_name, bucket)
[ "def", "create_s3_event", "(", "app_name", ",", "env", ",", "region", ",", "bucket", ",", "triggers", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "s3_client", "=", "session", ".", "client", "(", "'s3'", ")", "lambda_alias_arn", "=", "get_lambda_alias_arn", "(", "app_name", ",", "env", ",", "region", ")", "LOG", ".", "debug", "(", "\"Lambda ARN for lambda function %s is %s.\"", ",", "app_name", ",", "lambda_alias_arn", ")", "LOG", ".", "debug", "(", "\"Creating S3 events for bucket %s\"", ",", "bucket", ")", "# allow lambda trigger permission from bucket", "principal", "=", "'s3.amazonaws.com'", "statement_id", "=", "\"{}_s3_{}\"", ".", "format", "(", "app_name", ",", "bucket", ")", ".", "replace", "(", "'.'", ",", "''", ")", "source_arn", "=", "\"arn:aws:s3:::{}\"", ".", "format", "(", "bucket", ")", "add_lambda_permissions", "(", "function", "=", "lambda_alias_arn", ",", "env", "=", "env", ",", "region", "=", "region", ",", "principal", "=", "principal", ",", "statement_id", "=", "statement_id", ",", "source_arn", "=", "source_arn", ")", "# configure events on s3 bucket to trigger lambda function", "template_kwargs", "=", "{", "\"lambda_arn\"", ":", "lambda_alias_arn", ",", "\"triggers\"", ":", "triggers", "}", "config", "=", "get_template", "(", "template_file", "=", "'infrastructure/lambda/s3_event.json.j2'", ",", "*", "*", "template_kwargs", ")", "s3_client", ".", "put_bucket_notification_configuration", "(", "Bucket", "=", "bucket", ",", "NotificationConfiguration", "=", "json", ".", "loads", "(", "config", ")", ")", "LOG", ".", "info", "(", "\"Created lambda %s S3 event on bucket %s\"", ",", "app_name", ",", "bucket", ")" ]
Create S3 lambda events from triggers Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function triggers (list): List of triggers from the settings
[ "Create", "S3", "lambda", "events", "from", "triggers" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/s3_event/s3_event.py#L28-L62
train
foremast/foremast
src/foremast/utils/generate_filename.py
generate_packer_filename
def generate_packer_filename(provider, region, builder): """Generate a filename to be used by packer. Args: provider (str): Name of Spinnaker provider. region (str): Name of provider region to use. builder (str): Name of builder process type. Returns: str: Generated filename based on parameters. """ filename = '{0}_{1}_{2}.json'.format(provider, region, builder) return filename
python
def generate_packer_filename(provider, region, builder): """Generate a filename to be used by packer. Args: provider (str): Name of Spinnaker provider. region (str): Name of provider region to use. builder (str): Name of builder process type. Returns: str: Generated filename based on parameters. """ filename = '{0}_{1}_{2}.json'.format(provider, region, builder) return filename
[ "def", "generate_packer_filename", "(", "provider", ",", "region", ",", "builder", ")", ":", "filename", "=", "'{0}_{1}_{2}.json'", ".", "format", "(", "provider", ",", "region", ",", "builder", ")", "return", "filename" ]
Generate a filename to be used by packer. Args: provider (str): Name of Spinnaker provider. region (str): Name of provider region to use. builder (str): Name of builder process type. Returns: str: Generated filename based on parameters.
[ "Generate", "a", "filename", "to", "be", "used", "by", "packer", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/generate_filename.py#L19-L32
train
foremast/foremast
src/foremast/utils/templates.py
get_template
def get_template(template_file='', **kwargs): """Get the Jinja2 template and renders with dict _kwargs_. Args: template_file (str): name of the template file kwargs: Keywords to use for rendering the Jinja2 template. Returns: String of rendered JSON template. """ template = get_template_object(template_file) LOG.info('Rendering template %s', template.filename) for key, value in kwargs.items(): LOG.debug('%s => %s', key, value) rendered_json = template.render(**kwargs) LOG.debug('Rendered JSON:\n%s', rendered_json) return rendered_json
python
def get_template(template_file='', **kwargs): """Get the Jinja2 template and renders with dict _kwargs_. Args: template_file (str): name of the template file kwargs: Keywords to use for rendering the Jinja2 template. Returns: String of rendered JSON template. """ template = get_template_object(template_file) LOG.info('Rendering template %s', template.filename) for key, value in kwargs.items(): LOG.debug('%s => %s', key, value) rendered_json = template.render(**kwargs) LOG.debug('Rendered JSON:\n%s', rendered_json) return rendered_json
[ "def", "get_template", "(", "template_file", "=", "''", ",", "*", "*", "kwargs", ")", ":", "template", "=", "get_template_object", "(", "template_file", ")", "LOG", ".", "info", "(", "'Rendering template %s'", ",", "template", ".", "filename", ")", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", ":", "LOG", ".", "debug", "(", "'%s => %s'", ",", "key", ",", "value", ")", "rendered_json", "=", "template", ".", "render", "(", "*", "*", "kwargs", ")", "LOG", ".", "debug", "(", "'Rendered JSON:\\n%s'", ",", "rendered_json", ")", "return", "rendered_json" ]
Get the Jinja2 template and renders with dict _kwargs_. Args: template_file (str): name of the template file kwargs: Keywords to use for rendering the Jinja2 template. Returns: String of rendered JSON template.
[ "Get", "the", "Jinja2", "template", "and", "renders", "with", "dict", "_kwargs_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/templates.py#L70-L90
train
foremast/foremast
src/foremast/pipeline/renumerate_stages.py
renumerate_stages
def renumerate_stages(pipeline): """Renumber Pipeline Stage reference IDs to account for dependencies. stage order is defined in the templates. The ``refId`` field dictates if a stage should be mainline or parallel to other stages. * ``master`` - A mainline required stage. Other stages depend on it * ``branch`` - A stage that should be ran in parallel to master stages. * ``merge`` - A stage thatis parallel but other stages still depend on it. Args: pipeline (dict): Completed Pipeline ready for renumeration. Returns: dict: Pipeline ready to be sent to Spinnaker. """ stages = pipeline['stages'] main_index = 0 branch_index = 0 previous_refid = '' for stage in stages: current_refid = stage['refId'].lower() if current_refid == 'master': if main_index == 0: stage['requisiteStageRefIds'] = [] else: stage['requisiteStageRefIds'] = [str(main_index)] main_index += 1 stage['refId'] = str(main_index) elif current_refid == 'branch': # increments a branch_index to account for multiple parrallel stages if previous_refid == 'branch': branch_index += 1 else: branch_index = 0 stage['refId'] = str((main_index * 100) + branch_index) stage['requisiteStageRefIds'] = [str(main_index)] elif current_refid == 'merge': # TODO: Added logic to handle merge stages. pass previous_refid = current_refid LOG.debug('step=%(name)s\trefId=%(refId)s\t' 'requisiteStageRefIds=%(requisiteStageRefIds)s', stage) return pipeline
python
def renumerate_stages(pipeline): """Renumber Pipeline Stage reference IDs to account for dependencies. stage order is defined in the templates. The ``refId`` field dictates if a stage should be mainline or parallel to other stages. * ``master`` - A mainline required stage. Other stages depend on it * ``branch`` - A stage that should be ran in parallel to master stages. * ``merge`` - A stage thatis parallel but other stages still depend on it. Args: pipeline (dict): Completed Pipeline ready for renumeration. Returns: dict: Pipeline ready to be sent to Spinnaker. """ stages = pipeline['stages'] main_index = 0 branch_index = 0 previous_refid = '' for stage in stages: current_refid = stage['refId'].lower() if current_refid == 'master': if main_index == 0: stage['requisiteStageRefIds'] = [] else: stage['requisiteStageRefIds'] = [str(main_index)] main_index += 1 stage['refId'] = str(main_index) elif current_refid == 'branch': # increments a branch_index to account for multiple parrallel stages if previous_refid == 'branch': branch_index += 1 else: branch_index = 0 stage['refId'] = str((main_index * 100) + branch_index) stage['requisiteStageRefIds'] = [str(main_index)] elif current_refid == 'merge': # TODO: Added logic to handle merge stages. pass previous_refid = current_refid LOG.debug('step=%(name)s\trefId=%(refId)s\t' 'requisiteStageRefIds=%(requisiteStageRefIds)s', stage) return pipeline
[ "def", "renumerate_stages", "(", "pipeline", ")", ":", "stages", "=", "pipeline", "[", "'stages'", "]", "main_index", "=", "0", "branch_index", "=", "0", "previous_refid", "=", "''", "for", "stage", "in", "stages", ":", "current_refid", "=", "stage", "[", "'refId'", "]", ".", "lower", "(", ")", "if", "current_refid", "==", "'master'", ":", "if", "main_index", "==", "0", ":", "stage", "[", "'requisiteStageRefIds'", "]", "=", "[", "]", "else", ":", "stage", "[", "'requisiteStageRefIds'", "]", "=", "[", "str", "(", "main_index", ")", "]", "main_index", "+=", "1", "stage", "[", "'refId'", "]", "=", "str", "(", "main_index", ")", "elif", "current_refid", "==", "'branch'", ":", "# increments a branch_index to account for multiple parrallel stages", "if", "previous_refid", "==", "'branch'", ":", "branch_index", "+=", "1", "else", ":", "branch_index", "=", "0", "stage", "[", "'refId'", "]", "=", "str", "(", "(", "main_index", "*", "100", ")", "+", "branch_index", ")", "stage", "[", "'requisiteStageRefIds'", "]", "=", "[", "str", "(", "main_index", ")", "]", "elif", "current_refid", "==", "'merge'", ":", "# TODO: Added logic to handle merge stages.", "pass", "previous_refid", "=", "current_refid", "LOG", ".", "debug", "(", "'step=%(name)s\\trefId=%(refId)s\\t'", "'requisiteStageRefIds=%(requisiteStageRefIds)s'", ",", "stage", ")", "return", "pipeline" ]
Renumber Pipeline Stage reference IDs to account for dependencies. stage order is defined in the templates. The ``refId`` field dictates if a stage should be mainline or parallel to other stages. * ``master`` - A mainline required stage. Other stages depend on it * ``branch`` - A stage that should be ran in parallel to master stages. * ``merge`` - A stage thatis parallel but other stages still depend on it. Args: pipeline (dict): Completed Pipeline ready for renumeration. Returns: dict: Pipeline ready to be sent to Spinnaker.
[ "Renumber", "Pipeline", "Stage", "reference", "IDs", "to", "account", "for", "dependencies", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/renumerate_stages.py#L22-L68
train
foremast/foremast
src/foremast/utils/tasks.py
post_task
def post_task(task_data, task_uri='/tasks'): """Create Spinnaker Task. Args: task_data (str): Task JSON definition. Returns: str: Spinnaker Task ID. Raises: AssertionError: Error response from Spinnaker. """ url = '{}/{}'.format(API_URL, task_uri.lstrip('/')) if isinstance(task_data, str): task_json = task_data else: task_json = json.dumps(task_data) resp = requests.post(url, data=task_json, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) resp_json = resp.json() LOG.debug(resp_json) assert resp.ok, 'Spinnaker communication error: {0}'.format(resp.text) return resp_json['ref']
python
def post_task(task_data, task_uri='/tasks'): """Create Spinnaker Task. Args: task_data (str): Task JSON definition. Returns: str: Spinnaker Task ID. Raises: AssertionError: Error response from Spinnaker. """ url = '{}/{}'.format(API_URL, task_uri.lstrip('/')) if isinstance(task_data, str): task_json = task_data else: task_json = json.dumps(task_data) resp = requests.post(url, data=task_json, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) resp_json = resp.json() LOG.debug(resp_json) assert resp.ok, 'Spinnaker communication error: {0}'.format(resp.text) return resp_json['ref']
[ "def", "post_task", "(", "task_data", ",", "task_uri", "=", "'/tasks'", ")", ":", "url", "=", "'{}/{}'", ".", "format", "(", "API_URL", ",", "task_uri", ".", "lstrip", "(", "'/'", ")", ")", "if", "isinstance", "(", "task_data", ",", "str", ")", ":", "task_json", "=", "task_data", "else", ":", "task_json", "=", "json", ".", "dumps", "(", "task_data", ")", "resp", "=", "requests", ".", "post", "(", "url", ",", "data", "=", "task_json", ",", "headers", "=", "HEADERS", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "resp_json", "=", "resp", ".", "json", "(", ")", "LOG", ".", "debug", "(", "resp_json", ")", "assert", "resp", ".", "ok", ",", "'Spinnaker communication error: {0}'", ".", "format", "(", "resp", ".", "text", ")", "return", "resp_json", "[", "'ref'", "]" ]
Create Spinnaker Task. Args: task_data (str): Task JSON definition. Returns: str: Spinnaker Task ID. Raises: AssertionError: Error response from Spinnaker.
[ "Create", "Spinnaker", "Task", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L29-L56
train
foremast/foremast
src/foremast/utils/tasks.py
_check_task
def _check_task(taskid): """Check Spinnaker Task status. Args: taskid (str): Existing Spinnaker Task ID. Returns: str: Task status. """ try: taskurl = taskid.get('ref', '0000') except AttributeError: taskurl = taskid taskid = taskurl.split('/tasks/')[-1] LOG.info('Checking taskid %s', taskid) url = '{}/tasks/{}'.format(API_URL, taskid) task_response = requests.get(url, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) LOG.debug(task_response.json()) assert task_response.ok, 'Spinnaker communication error: {0}'.format(task_response.text) task_state = task_response.json() status = task_state['status'] LOG.info('Current task status: %s', status) if status == 'SUCCEEDED': # pylint: disable=no-else-return return status elif status == 'TERMINAL': raise SpinnakerTaskError(task_state) else: raise ValueError
python
def _check_task(taskid): """Check Spinnaker Task status. Args: taskid (str): Existing Spinnaker Task ID. Returns: str: Task status. """ try: taskurl = taskid.get('ref', '0000') except AttributeError: taskurl = taskid taskid = taskurl.split('/tasks/')[-1] LOG.info('Checking taskid %s', taskid) url = '{}/tasks/{}'.format(API_URL, taskid) task_response = requests.get(url, headers=HEADERS, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) LOG.debug(task_response.json()) assert task_response.ok, 'Spinnaker communication error: {0}'.format(task_response.text) task_state = task_response.json() status = task_state['status'] LOG.info('Current task status: %s', status) if status == 'SUCCEEDED': # pylint: disable=no-else-return return status elif status == 'TERMINAL': raise SpinnakerTaskError(task_state) else: raise ValueError
[ "def", "_check_task", "(", "taskid", ")", ":", "try", ":", "taskurl", "=", "taskid", ".", "get", "(", "'ref'", ",", "'0000'", ")", "except", "AttributeError", ":", "taskurl", "=", "taskid", "taskid", "=", "taskurl", ".", "split", "(", "'/tasks/'", ")", "[", "-", "1", "]", "LOG", ".", "info", "(", "'Checking taskid %s'", ",", "taskid", ")", "url", "=", "'{}/tasks/{}'", ".", "format", "(", "API_URL", ",", "taskid", ")", "task_response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "HEADERS", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "LOG", ".", "debug", "(", "task_response", ".", "json", "(", ")", ")", "assert", "task_response", ".", "ok", ",", "'Spinnaker communication error: {0}'", ".", "format", "(", "task_response", ".", "text", ")", "task_state", "=", "task_response", ".", "json", "(", ")", "status", "=", "task_state", "[", "'status'", "]", "LOG", ".", "info", "(", "'Current task status: %s'", ",", "status", ")", "if", "status", "==", "'SUCCEEDED'", ":", "# pylint: disable=no-else-return", "return", "status", "elif", "status", "==", "'TERMINAL'", ":", "raise", "SpinnakerTaskError", "(", "task_state", ")", "else", ":", "raise", "ValueError" ]
Check Spinnaker Task status. Args: taskid (str): Existing Spinnaker Task ID. Returns: str: Task status.
[ "Check", "Spinnaker", "Task", "status", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L59-L94
train
foremast/foremast
src/foremast/utils/tasks.py
check_task
def check_task(taskid, timeout=DEFAULT_TASK_TIMEOUT, wait=2): """Wrap check_task. Args: taskid (str): Existing Spinnaker Task ID. timeout (int, optional): Consider Task failed after given seconds. wait (int, optional): Seconds to pause between polling attempts. Returns: str: Task status. Raises: AssertionError: API did not respond with a 200 status code. :obj:`foremast.exceptions.SpinnakerTaskInconclusiveError`: Task did not reach a terminal state before the given time out. """ max_attempts = int(timeout / wait) try: return retry_call( partial(_check_task, taskid), max_attempts=max_attempts, wait=wait, exceptions=(AssertionError, ValueError), ) except ValueError: raise SpinnakerTaskInconclusiveError('Task failed to complete in {0} seconds: {1}'.format(timeout, taskid))
python
def check_task(taskid, timeout=DEFAULT_TASK_TIMEOUT, wait=2): """Wrap check_task. Args: taskid (str): Existing Spinnaker Task ID. timeout (int, optional): Consider Task failed after given seconds. wait (int, optional): Seconds to pause between polling attempts. Returns: str: Task status. Raises: AssertionError: API did not respond with a 200 status code. :obj:`foremast.exceptions.SpinnakerTaskInconclusiveError`: Task did not reach a terminal state before the given time out. """ max_attempts = int(timeout / wait) try: return retry_call( partial(_check_task, taskid), max_attempts=max_attempts, wait=wait, exceptions=(AssertionError, ValueError), ) except ValueError: raise SpinnakerTaskInconclusiveError('Task failed to complete in {0} seconds: {1}'.format(timeout, taskid))
[ "def", "check_task", "(", "taskid", ",", "timeout", "=", "DEFAULT_TASK_TIMEOUT", ",", "wait", "=", "2", ")", ":", "max_attempts", "=", "int", "(", "timeout", "/", "wait", ")", "try", ":", "return", "retry_call", "(", "partial", "(", "_check_task", ",", "taskid", ")", ",", "max_attempts", "=", "max_attempts", ",", "wait", "=", "wait", ",", "exceptions", "=", "(", "AssertionError", ",", "ValueError", ")", ",", ")", "except", "ValueError", ":", "raise", "SpinnakerTaskInconclusiveError", "(", "'Task failed to complete in {0} seconds: {1}'", ".", "format", "(", "timeout", ",", "taskid", ")", ")" ]
Wrap check_task. Args: taskid (str): Existing Spinnaker Task ID. timeout (int, optional): Consider Task failed after given seconds. wait (int, optional): Seconds to pause between polling attempts. Returns: str: Task status. Raises: AssertionError: API did not respond with a 200 status code. :obj:`foremast.exceptions.SpinnakerTaskInconclusiveError`: Task did not reach a terminal state before the given time out.
[ "Wrap", "check_task", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L97-L122
train
foremast/foremast
src/foremast/utils/tasks.py
wait_for_task
def wait_for_task(task_data, task_uri='/tasks'): """Run task and check the result. Args: task_data (str): the task json to execute Returns: str: Task status. """ taskid = post_task(task_data, task_uri) if isinstance(task_data, str): json_data = json.loads(task_data) else: json_data = task_data # inspect the task to see if a timeout is configured job = json_data['job'][0] env = job.get('credentials') task_type = job.get('type') timeout = TASK_TIMEOUTS.get(env, dict()).get(task_type, DEFAULT_TASK_TIMEOUT) LOG.debug("Task %s will timeout after %s", task_type, timeout) return check_task(taskid, timeout)
python
def wait_for_task(task_data, task_uri='/tasks'): """Run task and check the result. Args: task_data (str): the task json to execute Returns: str: Task status. """ taskid = post_task(task_data, task_uri) if isinstance(task_data, str): json_data = json.loads(task_data) else: json_data = task_data # inspect the task to see if a timeout is configured job = json_data['job'][0] env = job.get('credentials') task_type = job.get('type') timeout = TASK_TIMEOUTS.get(env, dict()).get(task_type, DEFAULT_TASK_TIMEOUT) LOG.debug("Task %s will timeout after %s", task_type, timeout) return check_task(taskid, timeout)
[ "def", "wait_for_task", "(", "task_data", ",", "task_uri", "=", "'/tasks'", ")", ":", "taskid", "=", "post_task", "(", "task_data", ",", "task_uri", ")", "if", "isinstance", "(", "task_data", ",", "str", ")", ":", "json_data", "=", "json", ".", "loads", "(", "task_data", ")", "else", ":", "json_data", "=", "task_data", "# inspect the task to see if a timeout is configured", "job", "=", "json_data", "[", "'job'", "]", "[", "0", "]", "env", "=", "job", ".", "get", "(", "'credentials'", ")", "task_type", "=", "job", ".", "get", "(", "'type'", ")", "timeout", "=", "TASK_TIMEOUTS", ".", "get", "(", "env", ",", "dict", "(", ")", ")", ".", "get", "(", "task_type", ",", "DEFAULT_TASK_TIMEOUT", ")", "LOG", ".", "debug", "(", "\"Task %s will timeout after %s\"", ",", "task_type", ",", "timeout", ")", "return", "check_task", "(", "taskid", ",", "timeout", ")" ]
Run task and check the result. Args: task_data (str): the task json to execute Returns: str: Task status.
[ "Run", "task", "and", "check", "the", "result", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/tasks.py#L125-L151
train
foremast/foremast
src/foremast/s3/__main__.py
main
def main(): """Create application.properties for a given application.""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) add_app(parser) add_env(parser) add_properties(parser) add_region(parser) add_artifact_path(parser) add_artifact_version(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) LOG.debug('Args: %s', vars(args)) rendered_props = get_properties(args.properties) if rendered_props['pipeline']['type'] == 's3': s3app = S3Apps(app=args.app, env=args.env, region=args.region, prop_path=args.properties) s3app.create_bucket() s3deploy = S3Deployment( app=args.app, env=args.env, region=args.region, prop_path=args.properties, artifact_path=args.artifact_path, artifact_version=args.artifact_version) s3deploy.upload_artifacts() else: init_properties(**vars(args))
python
def main(): """Create application.properties for a given application.""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) add_app(parser) add_env(parser) add_properties(parser) add_region(parser) add_artifact_path(parser) add_artifact_version(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) LOG.debug('Args: %s', vars(args)) rendered_props = get_properties(args.properties) if rendered_props['pipeline']['type'] == 's3': s3app = S3Apps(app=args.app, env=args.env, region=args.region, prop_path=args.properties) s3app.create_bucket() s3deploy = S3Deployment( app=args.app, env=args.env, region=args.region, prop_path=args.properties, artifact_path=args.artifact_path, artifact_version=args.artifact_version) s3deploy.upload_artifacts() else: init_properties(**vars(args))
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "main", ".", "__doc__", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_env", "(", "parser", ")", "add_properties", "(", "parser", ")", "add_region", "(", "parser", ")", "add_artifact_path", "(", "parser", ")", "add_artifact_version", "(", "parser", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "LOG", ".", "debug", "(", "'Args: %s'", ",", "vars", "(", "args", ")", ")", "rendered_props", "=", "get_properties", "(", "args", ".", "properties", ")", "if", "rendered_props", "[", "'pipeline'", "]", "[", "'type'", "]", "==", "'s3'", ":", "s3app", "=", "S3Apps", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ")", "s3app", ".", "create_bucket", "(", ")", "s3deploy", "=", "S3Deployment", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ",", "artifact_path", "=", "args", ".", "artifact_path", ",", "artifact_version", "=", "args", ".", "artifact_version", ")", "s3deploy", ".", "upload_artifacts", "(", ")", "else", ":", "init_properties", "(", "*", "*", "vars", "(", "args", ")", ")" ]
Create application.properties for a given application.
[ "Create", "application", ".", "properties", "for", "a", "given", "application", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/__main__.py#L34-L68
train
foremast/foremast
src/foremast/s3/create_archaius.py
init_properties
def init_properties(env='dev', app='unnecessary', **_): """Make sure _application.properties_ file exists in S3. For Applications with Archaius support, there needs to be a file where the cloud environment variable points to. Args: env (str): Deployment environment/account, i.e. dev, stage, prod. app (str): GitLab Project name. Returns: True when application.properties was found. False when application.properties needed to be created. """ aws_env = boto3.session.Session(profile_name=env) s3client = aws_env.resource('s3') generated = get_details(app=app, env=env) archaius = generated.archaius() archaius_file = ('{path}/application.properties').format(path=archaius['path']) try: s3client.Object(archaius['bucket'], archaius_file).get() LOG.info('Found: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file}) return True except boto3.exceptions.botocore.client.ClientError: s3client.Object(archaius['bucket'], archaius_file).put() LOG.info('Created: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file}) return False
python
def init_properties(env='dev', app='unnecessary', **_): """Make sure _application.properties_ file exists in S3. For Applications with Archaius support, there needs to be a file where the cloud environment variable points to. Args: env (str): Deployment environment/account, i.e. dev, stage, prod. app (str): GitLab Project name. Returns: True when application.properties was found. False when application.properties needed to be created. """ aws_env = boto3.session.Session(profile_name=env) s3client = aws_env.resource('s3') generated = get_details(app=app, env=env) archaius = generated.archaius() archaius_file = ('{path}/application.properties').format(path=archaius['path']) try: s3client.Object(archaius['bucket'], archaius_file).get() LOG.info('Found: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file}) return True except boto3.exceptions.botocore.client.ClientError: s3client.Object(archaius['bucket'], archaius_file).put() LOG.info('Created: %(bucket)s/%(file)s', {'bucket': archaius['bucket'], 'file': archaius_file}) return False
[ "def", "init_properties", "(", "env", "=", "'dev'", ",", "app", "=", "'unnecessary'", ",", "*", "*", "_", ")", ":", "aws_env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "env", ")", "s3client", "=", "aws_env", ".", "resource", "(", "'s3'", ")", "generated", "=", "get_details", "(", "app", "=", "app", ",", "env", "=", "env", ")", "archaius", "=", "generated", ".", "archaius", "(", ")", "archaius_file", "=", "(", "'{path}/application.properties'", ")", ".", "format", "(", "path", "=", "archaius", "[", "'path'", "]", ")", "try", ":", "s3client", ".", "Object", "(", "archaius", "[", "'bucket'", "]", ",", "archaius_file", ")", ".", "get", "(", ")", "LOG", ".", "info", "(", "'Found: %(bucket)s/%(file)s'", ",", "{", "'bucket'", ":", "archaius", "[", "'bucket'", "]", ",", "'file'", ":", "archaius_file", "}", ")", "return", "True", "except", "boto3", ".", "exceptions", ".", "botocore", ".", "client", ".", "ClientError", ":", "s3client", ".", "Object", "(", "archaius", "[", "'bucket'", "]", ",", "archaius_file", ")", ".", "put", "(", ")", "LOG", ".", "info", "(", "'Created: %(bucket)s/%(file)s'", ",", "{", "'bucket'", ":", "archaius", "[", "'bucket'", "]", ",", "'file'", ":", "archaius_file", "}", ")", "return", "False" ]
Make sure _application.properties_ file exists in S3. For Applications with Archaius support, there needs to be a file where the cloud environment variable points to. Args: env (str): Deployment environment/account, i.e. dev, stage, prod. app (str): GitLab Project name. Returns: True when application.properties was found. False when application.properties needed to be created.
[ "Make", "sure", "_application", ".", "properties_", "file", "exists", "in", "S3", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/s3/create_archaius.py#L26-L55
train
foremast/foremast
src/foremast/awslambda/cloudwatch_event/cloudwatch_event.py
create_cloudwatch_event
def create_cloudwatch_event(app_name, env, region, rules): """Create cloudwatch event for lambda from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (dict): Trigger rules from the settings """ session = boto3.Session(profile_name=env, region_name=region) cloudwatch_client = session.client('events') rule_name = rules.get('rule_name') schedule = rules.get('schedule') rule_description = rules.get('rule_description') json_input = rules.get('json_input', {}) if schedule is None: LOG.critical('Schedule is required and no schedule is defined!') raise InvalidEventConfiguration('Schedule is required and no schedule is defined!') if rule_name is None: LOG.critical('Rule name is required and no rule_name is defined!') raise InvalidEventConfiguration('Rule name is required and no rule_name is defined!') else: LOG.info('%s and %s', app_name, rule_name) rule_name = "{}_{}".format(app_name, rule_name.replace(' ', '_')) if rule_description is None: rule_description = "{} - {}".format(app_name, rule_name) lambda_arn = get_lambda_arn(app=app_name, account=env, region=region) # Add lambda permissions account_id = get_env_credential(env=env)['accountId'] principal = "events.amazonaws.com" statement_id = '{}_cloudwatch_{}'.format(app_name, rule_name) source_arn = 'arn:aws:events:{}:{}:rule/{}'.format(region, account_id, rule_name) add_lambda_permissions( function=lambda_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, source_arn=source_arn, env=env, region=region, ) # Create Cloudwatch rule cloudwatch_client.put_rule( Name=rule_name, ScheduleExpression=schedule, State='ENABLED', Description=rule_description, ) targets = [] # TODO: read this one from file event-config-*.json json_payload = '{}'.format(json.dumps(json_input)) target = { "Id": app_name, "Arn": lambda_arn, "Input": json_payload, } targets.append(target) put_targets_response 
= cloudwatch_client.put_targets(Rule=rule_name, Targets=targets) LOG.debug('Cloudwatch put targets response: %s', put_targets_response) LOG.info('Created Cloudwatch event "%s" with schedule: %s', rule_name, schedule)
python
def create_cloudwatch_event(app_name, env, region, rules): """Create cloudwatch event for lambda from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (dict): Trigger rules from the settings """ session = boto3.Session(profile_name=env, region_name=region) cloudwatch_client = session.client('events') rule_name = rules.get('rule_name') schedule = rules.get('schedule') rule_description = rules.get('rule_description') json_input = rules.get('json_input', {}) if schedule is None: LOG.critical('Schedule is required and no schedule is defined!') raise InvalidEventConfiguration('Schedule is required and no schedule is defined!') if rule_name is None: LOG.critical('Rule name is required and no rule_name is defined!') raise InvalidEventConfiguration('Rule name is required and no rule_name is defined!') else: LOG.info('%s and %s', app_name, rule_name) rule_name = "{}_{}".format(app_name, rule_name.replace(' ', '_')) if rule_description is None: rule_description = "{} - {}".format(app_name, rule_name) lambda_arn = get_lambda_arn(app=app_name, account=env, region=region) # Add lambda permissions account_id = get_env_credential(env=env)['accountId'] principal = "events.amazonaws.com" statement_id = '{}_cloudwatch_{}'.format(app_name, rule_name) source_arn = 'arn:aws:events:{}:{}:rule/{}'.format(region, account_id, rule_name) add_lambda_permissions( function=lambda_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, source_arn=source_arn, env=env, region=region, ) # Create Cloudwatch rule cloudwatch_client.put_rule( Name=rule_name, ScheduleExpression=schedule, State='ENABLED', Description=rule_description, ) targets = [] # TODO: read this one from file event-config-*.json json_payload = '{}'.format(json.dumps(json_input)) target = { "Id": app_name, "Arn": lambda_arn, "Input": json_payload, } targets.append(target) put_targets_response 
= cloudwatch_client.put_targets(Rule=rule_name, Targets=targets) LOG.debug('Cloudwatch put targets response: %s', put_targets_response) LOG.info('Created Cloudwatch event "%s" with schedule: %s', rule_name, schedule)
[ "def", "create_cloudwatch_event", "(", "app_name", ",", "env", ",", "region", ",", "rules", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "cloudwatch_client", "=", "session", ".", "client", "(", "'events'", ")", "rule_name", "=", "rules", ".", "get", "(", "'rule_name'", ")", "schedule", "=", "rules", ".", "get", "(", "'schedule'", ")", "rule_description", "=", "rules", ".", "get", "(", "'rule_description'", ")", "json_input", "=", "rules", ".", "get", "(", "'json_input'", ",", "{", "}", ")", "if", "schedule", "is", "None", ":", "LOG", ".", "critical", "(", "'Schedule is required and no schedule is defined!'", ")", "raise", "InvalidEventConfiguration", "(", "'Schedule is required and no schedule is defined!'", ")", "if", "rule_name", "is", "None", ":", "LOG", ".", "critical", "(", "'Rule name is required and no rule_name is defined!'", ")", "raise", "InvalidEventConfiguration", "(", "'Rule name is required and no rule_name is defined!'", ")", "else", ":", "LOG", ".", "info", "(", "'%s and %s'", ",", "app_name", ",", "rule_name", ")", "rule_name", "=", "\"{}_{}\"", ".", "format", "(", "app_name", ",", "rule_name", ".", "replace", "(", "' '", ",", "'_'", ")", ")", "if", "rule_description", "is", "None", ":", "rule_description", "=", "\"{} - {}\"", ".", "format", "(", "app_name", ",", "rule_name", ")", "lambda_arn", "=", "get_lambda_arn", "(", "app", "=", "app_name", ",", "account", "=", "env", ",", "region", "=", "region", ")", "# Add lambda permissions", "account_id", "=", "get_env_credential", "(", "env", "=", "env", ")", "[", "'accountId'", "]", "principal", "=", "\"events.amazonaws.com\"", "statement_id", "=", "'{}_cloudwatch_{}'", ".", "format", "(", "app_name", ",", "rule_name", ")", "source_arn", "=", "'arn:aws:events:{}:{}:rule/{}'", ".", "format", "(", "region", ",", "account_id", ",", "rule_name", ")", "add_lambda_permissions", "(", "function", "=", "lambda_arn", ",", 
"statement_id", "=", "statement_id", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "principal", ",", "source_arn", "=", "source_arn", ",", "env", "=", "env", ",", "region", "=", "region", ",", ")", "# Create Cloudwatch rule", "cloudwatch_client", ".", "put_rule", "(", "Name", "=", "rule_name", ",", "ScheduleExpression", "=", "schedule", ",", "State", "=", "'ENABLED'", ",", "Description", "=", "rule_description", ",", ")", "targets", "=", "[", "]", "# TODO: read this one from file event-config-*.json", "json_payload", "=", "'{}'", ".", "format", "(", "json", ".", "dumps", "(", "json_input", ")", ")", "target", "=", "{", "\"Id\"", ":", "app_name", ",", "\"Arn\"", ":", "lambda_arn", ",", "\"Input\"", ":", "json_payload", ",", "}", "targets", ".", "append", "(", "target", ")", "put_targets_response", "=", "cloudwatch_client", ".", "put_targets", "(", "Rule", "=", "rule_name", ",", "Targets", "=", "targets", ")", "LOG", ".", "debug", "(", "'Cloudwatch put targets response: %s'", ",", "put_targets_response", ")", "LOG", ".", "info", "(", "'Created Cloudwatch event \"%s\" with schedule: %s'", ",", "rule_name", ",", "schedule", ")" ]
Create cloudwatch event for lambda from rules. Args: app_name (str): name of the lambda function env (str): Environment/Account for lambda function region (str): AWS region of the lambda function rules (dict): Trigger rules from the settings
[ "Create", "cloudwatch", "event", "for", "lambda", "from", "rules", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/cloudwatch_event/cloudwatch_event.py#L29-L98
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.find_api_id
def find_api_id(self): """Given API name, find API ID.""" allapis = self.client.get_rest_apis() api_name = self.trigger_settings['api_name'] api_id = None for api in allapis['items']: if api['name'] == api_name: api_id = api['id'] self.log.info("Found API for: %s", api_name) break else: api_id = self.create_api() return api_id
python
def find_api_id(self): """Given API name, find API ID.""" allapis = self.client.get_rest_apis() api_name = self.trigger_settings['api_name'] api_id = None for api in allapis['items']: if api['name'] == api_name: api_id = api['id'] self.log.info("Found API for: %s", api_name) break else: api_id = self.create_api() return api_id
[ "def", "find_api_id", "(", "self", ")", ":", "allapis", "=", "self", ".", "client", ".", "get_rest_apis", "(", ")", "api_name", "=", "self", ".", "trigger_settings", "[", "'api_name'", "]", "api_id", "=", "None", "for", "api", "in", "allapis", "[", "'items'", "]", ":", "if", "api", "[", "'name'", "]", "==", "api_name", ":", "api_id", "=", "api", "[", "'id'", "]", "self", ".", "log", ".", "info", "(", "\"Found API for: %s\"", ",", "api_name", ")", "break", "else", ":", "api_id", "=", "self", ".", "create_api", "(", ")", "return", "api_id" ]
Given API name, find API ID.
[ "Given", "API", "name", "find", "API", "ID", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L59-L72
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.find_resource_ids
def find_resource_ids(self): """Given a resource path and API Id, find resource Id.""" all_resources = self.client.get_resources(restApiId=self.api_id) parent_id = None resource_id = None for resource in all_resources['items']: if resource['path'] == "/": parent_id = resource['id'] if resource['path'] == self.trigger_settings['resource']: resource_id = resource['id'] self.log.info("Found Resource ID for: %s", resource['path']) return resource_id, parent_id
python
def find_resource_ids(self): """Given a resource path and API Id, find resource Id.""" all_resources = self.client.get_resources(restApiId=self.api_id) parent_id = None resource_id = None for resource in all_resources['items']: if resource['path'] == "/": parent_id = resource['id'] if resource['path'] == self.trigger_settings['resource']: resource_id = resource['id'] self.log.info("Found Resource ID for: %s", resource['path']) return resource_id, parent_id
[ "def", "find_resource_ids", "(", "self", ")", ":", "all_resources", "=", "self", ".", "client", ".", "get_resources", "(", "restApiId", "=", "self", ".", "api_id", ")", "parent_id", "=", "None", "resource_id", "=", "None", "for", "resource", "in", "all_resources", "[", "'items'", "]", ":", "if", "resource", "[", "'path'", "]", "==", "\"/\"", ":", "parent_id", "=", "resource", "[", "'id'", "]", "if", "resource", "[", "'path'", "]", "==", "self", ".", "trigger_settings", "[", "'resource'", "]", ":", "resource_id", "=", "resource", "[", "'id'", "]", "self", ".", "log", ".", "info", "(", "\"Found Resource ID for: %s\"", ",", "resource", "[", "'path'", "]", ")", "return", "resource_id", ",", "parent_id" ]
Given a resource path and API Id, find resource Id.
[ "Given", "a", "resource", "path", "and", "API", "Id", "find", "resource", "Id", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L74-L85
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.add_lambda_integration
def add_lambda_integration(self): """Attach lambda found to API.""" lambda_uri = self.generate_uris()['lambda_uri'] self.client.put_integration( restApiId=self.api_id, resourceId=self.resource_id, httpMethod=self.trigger_settings['method'], integrationHttpMethod='POST', uri=lambda_uri, type='AWS') self.add_integration_response() self.log.info("Successfully added Lambda intergration to API")
python
def add_lambda_integration(self): """Attach lambda found to API.""" lambda_uri = self.generate_uris()['lambda_uri'] self.client.put_integration( restApiId=self.api_id, resourceId=self.resource_id, httpMethod=self.trigger_settings['method'], integrationHttpMethod='POST', uri=lambda_uri, type='AWS') self.add_integration_response() self.log.info("Successfully added Lambda intergration to API")
[ "def", "add_lambda_integration", "(", "self", ")", ":", "lambda_uri", "=", "self", ".", "generate_uris", "(", ")", "[", "'lambda_uri'", "]", "self", ".", "client", ".", "put_integration", "(", "restApiId", "=", "self", ".", "api_id", ",", "resourceId", "=", "self", ".", "resource_id", ",", "httpMethod", "=", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "integrationHttpMethod", "=", "'POST'", ",", "uri", "=", "lambda_uri", ",", "type", "=", "'AWS'", ")", "self", ".", "add_integration_response", "(", ")", "self", ".", "log", ".", "info", "(", "\"Successfully added Lambda intergration to API\"", ")" ]
Attach lambda found to API.
[ "Attach", "lambda", "found", "to", "API", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L87-L98
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.add_integration_response
def add_integration_response(self): """Add an intergation response to the API for the lambda integration.""" self.client.put_integration_response( restApiId=self.api_id, resourceId=self.resource_id, httpMethod=self.trigger_settings['method'], statusCode='200', responseTemplates={'application/json': ''})
python
def add_integration_response(self): """Add an intergation response to the API for the lambda integration.""" self.client.put_integration_response( restApiId=self.api_id, resourceId=self.resource_id, httpMethod=self.trigger_settings['method'], statusCode='200', responseTemplates={'application/json': ''})
[ "def", "add_integration_response", "(", "self", ")", ":", "self", ".", "client", ".", "put_integration_response", "(", "restApiId", "=", "self", ".", "api_id", ",", "resourceId", "=", "self", ".", "resource_id", ",", "httpMethod", "=", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "statusCode", "=", "'200'", ",", "responseTemplates", "=", "{", "'application/json'", ":", "''", "}", ")" ]
Add an intergation response to the API for the lambda integration.
[ "Add", "an", "intergation", "response", "to", "the", "API", "for", "the", "lambda", "integration", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L100-L107
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.add_permission
def add_permission(self): """Add permission to Lambda for the API Trigger.""" statement_id = '{}_api_{}'.format(self.app_name, self.trigger_settings['api_name']) principal = 'apigateway.amazonaws.com' lambda_alias_arn = get_lambda_alias_arn(self.app_name, self.env, self.region) lambda_unqualified_arn = get_lambda_arn(self.app_name, self.env, self.region) resource_name = self.trigger_settings.get('resource', '') resource_name = resource_name.replace('/', '') method_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/{}/{}/{}'.format( self.region, self.account_id, self.api_id, self.env, self.trigger_settings['method'], resource_name) global_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/*/*/{}'.format(self.region, self.account_id, self.api_id, resource_name) add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id + self.trigger_settings['method'], action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=method_api_source_arn) add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=global_api_source_arn) add_lambda_permissions( function=lambda_unqualified_arn, statement_id=statement_id + self.trigger_settings['method'], action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=method_api_source_arn) add_lambda_permissions( function=lambda_unqualified_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=global_api_source_arn)
python
def add_permission(self): """Add permission to Lambda for the API Trigger.""" statement_id = '{}_api_{}'.format(self.app_name, self.trigger_settings['api_name']) principal = 'apigateway.amazonaws.com' lambda_alias_arn = get_lambda_alias_arn(self.app_name, self.env, self.region) lambda_unqualified_arn = get_lambda_arn(self.app_name, self.env, self.region) resource_name = self.trigger_settings.get('resource', '') resource_name = resource_name.replace('/', '') method_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/{}/{}/{}'.format( self.region, self.account_id, self.api_id, self.env, self.trigger_settings['method'], resource_name) global_api_source_arn = 'arn:aws:execute-api:{}:{}:{}/*/*/{}'.format(self.region, self.account_id, self.api_id, resource_name) add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id + self.trigger_settings['method'], action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=method_api_source_arn) add_lambda_permissions( function=lambda_alias_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=global_api_source_arn) add_lambda_permissions( function=lambda_unqualified_arn, statement_id=statement_id + self.trigger_settings['method'], action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=method_api_source_arn) add_lambda_permissions( function=lambda_unqualified_arn, statement_id=statement_id, action='lambda:InvokeFunction', principal=principal, env=self.env, region=self.region, source_arn=global_api_source_arn)
[ "def", "add_permission", "(", "self", ")", ":", "statement_id", "=", "'{}_api_{}'", ".", "format", "(", "self", ".", "app_name", ",", "self", ".", "trigger_settings", "[", "'api_name'", "]", ")", "principal", "=", "'apigateway.amazonaws.com'", "lambda_alias_arn", "=", "get_lambda_alias_arn", "(", "self", ".", "app_name", ",", "self", ".", "env", ",", "self", ".", "region", ")", "lambda_unqualified_arn", "=", "get_lambda_arn", "(", "self", ".", "app_name", ",", "self", ".", "env", ",", "self", ".", "region", ")", "resource_name", "=", "self", ".", "trigger_settings", ".", "get", "(", "'resource'", ",", "''", ")", "resource_name", "=", "resource_name", ".", "replace", "(", "'/'", ",", "''", ")", "method_api_source_arn", "=", "'arn:aws:execute-api:{}:{}:{}/{}/{}/{}'", ".", "format", "(", "self", ".", "region", ",", "self", ".", "account_id", ",", "self", ".", "api_id", ",", "self", ".", "env", ",", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "resource_name", ")", "global_api_source_arn", "=", "'arn:aws:execute-api:{}:{}:{}/*/*/{}'", ".", "format", "(", "self", ".", "region", ",", "self", ".", "account_id", ",", "self", ".", "api_id", ",", "resource_name", ")", "add_lambda_permissions", "(", "function", "=", "lambda_alias_arn", ",", "statement_id", "=", "statement_id", "+", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "principal", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "source_arn", "=", "method_api_source_arn", ")", "add_lambda_permissions", "(", "function", "=", "lambda_alias_arn", ",", "statement_id", "=", "statement_id", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "principal", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "source_arn", "=", "global_api_source_arn", ")", "add_lambda_permissions", "(", "function", "=", "lambda_unqualified_arn", ",", 
"statement_id", "=", "statement_id", "+", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "principal", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "source_arn", "=", "method_api_source_arn", ")", "add_lambda_permissions", "(", "function", "=", "lambda_unqualified_arn", ",", "statement_id", "=", "statement_id", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "principal", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ",", "source_arn", "=", "global_api_source_arn", ")" ]
Add permission to Lambda for the API Trigger.
[ "Add", "permission", "to", "Lambda", "for", "the", "API", "Trigger", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L109-L152
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.create_api_deployment
def create_api_deployment(self): """Create API deployment of ENV name.""" try: self.client.create_deployment(restApiId=self.api_id, stageName=self.env) self.log.info('Created a deployment resource.') except botocore.exceptions.ClientError as error: error_code = error.response['Error']['Code'] if error_code == 'TooManyRequestsException': self.log.debug('Retrying. We have hit api limit.') else: self.log.debug('Retrying. We received %s.', error_code)
python
def create_api_deployment(self): """Create API deployment of ENV name.""" try: self.client.create_deployment(restApiId=self.api_id, stageName=self.env) self.log.info('Created a deployment resource.') except botocore.exceptions.ClientError as error: error_code = error.response['Error']['Code'] if error_code == 'TooManyRequestsException': self.log.debug('Retrying. We have hit api limit.') else: self.log.debug('Retrying. We received %s.', error_code)
[ "def", "create_api_deployment", "(", "self", ")", ":", "try", ":", "self", ".", "client", ".", "create_deployment", "(", "restApiId", "=", "self", ".", "api_id", ",", "stageName", "=", "self", ".", "env", ")", "self", ".", "log", ".", "info", "(", "'Created a deployment resource.'", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "error_code", "=", "error", ".", "response", "[", "'Error'", "]", "[", "'Code'", "]", "if", "error_code", "==", "'TooManyRequestsException'", ":", "self", ".", "log", ".", "debug", "(", "'Retrying. We have hit api limit.'", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "'Retrying. We received %s.'", ",", "error_code", ")" ]
Create API deployment of ENV name.
[ "Create", "API", "deployment", "of", "ENV", "name", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L155-L165
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.create_api_key
def create_api_key(self): """Create API Key for API access.""" apikeys = self.client.get_api_keys() for key in apikeys['items']: if key['name'] == self.app_name: self.log.info("Key %s already exists", self.app_name) break else: self.client.create_api_key( name=self.app_name, enabled=True, stageKeys=[{ 'restApiId': self.api_id, 'stageName': self.env }]) self.log.info("Successfully created API Key %s. Look in the AWS console for the key", self.app_name)
python
def create_api_key(self): """Create API Key for API access.""" apikeys = self.client.get_api_keys() for key in apikeys['items']: if key['name'] == self.app_name: self.log.info("Key %s already exists", self.app_name) break else: self.client.create_api_key( name=self.app_name, enabled=True, stageKeys=[{ 'restApiId': self.api_id, 'stageName': self.env }]) self.log.info("Successfully created API Key %s. Look in the AWS console for the key", self.app_name)
[ "def", "create_api_key", "(", "self", ")", ":", "apikeys", "=", "self", ".", "client", ".", "get_api_keys", "(", ")", "for", "key", "in", "apikeys", "[", "'items'", "]", ":", "if", "key", "[", "'name'", "]", "==", "self", ".", "app_name", ":", "self", ".", "log", ".", "info", "(", "\"Key %s already exists\"", ",", "self", ".", "app_name", ")", "break", "else", ":", "self", ".", "client", ".", "create_api_key", "(", "name", "=", "self", ".", "app_name", ",", "enabled", "=", "True", ",", "stageKeys", "=", "[", "{", "'restApiId'", ":", "self", ".", "api_id", ",", "'stageName'", ":", "self", ".", "env", "}", "]", ")", "self", ".", "log", ".", "info", "(", "\"Successfully created API Key %s. Look in the AWS console for the key\"", ",", "self", ".", "app_name", ")" ]
Create API Key for API access.
[ "Create", "API", "Key", "for", "API", "access", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L167-L180
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway._format_base_path
def _format_base_path(self, api_name): """Format the base path name.""" name = self.app_name if self.app_name != api_name: name = '{0}-{1}'.format(self.app_name, api_name) return name
python
def _format_base_path(self, api_name): """Format the base path name.""" name = self.app_name if self.app_name != api_name: name = '{0}-{1}'.format(self.app_name, api_name) return name
[ "def", "_format_base_path", "(", "self", ",", "api_name", ")", ":", "name", "=", "self", ".", "app_name", "if", "self", ".", "app_name", "!=", "api_name", ":", "name", "=", "'{0}-{1}'", ".", "format", "(", "self", ".", "app_name", ",", "api_name", ")", "return", "name" ]
Format the base path name.
[ "Format", "the", "base", "path", "name", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L182-L187
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.update_api_mappings
def update_api_mappings(self): """Create a cname for the API deployment.""" response_provider = None response_action = None domain = self.generated.apigateway()['domain'] try: response_provider = self.client.create_base_path_mapping( domainName=domain, basePath=self._format_base_path(self.trigger_settings['api_name']), restApiId=self.api_id, stage=self.env, ) response_action = 'API mapping added.' except botocore.exceptions.ClientError as error: error_code = error.response['Error']['Code'] if error_code == 'ConflictException': response_action = 'API mapping already exist.' else: response_action = 'Unknown error: {0}'.format(error_code) self.log.debug('Provider response: %s', response_provider) self.log.info(response_action) return response_provider
python
def update_api_mappings(self): """Create a cname for the API deployment.""" response_provider = None response_action = None domain = self.generated.apigateway()['domain'] try: response_provider = self.client.create_base_path_mapping( domainName=domain, basePath=self._format_base_path(self.trigger_settings['api_name']), restApiId=self.api_id, stage=self.env, ) response_action = 'API mapping added.' except botocore.exceptions.ClientError as error: error_code = error.response['Error']['Code'] if error_code == 'ConflictException': response_action = 'API mapping already exist.' else: response_action = 'Unknown error: {0}'.format(error_code) self.log.debug('Provider response: %s', response_provider) self.log.info(response_action) return response_provider
[ "def", "update_api_mappings", "(", "self", ")", ":", "response_provider", "=", "None", "response_action", "=", "None", "domain", "=", "self", ".", "generated", ".", "apigateway", "(", ")", "[", "'domain'", "]", "try", ":", "response_provider", "=", "self", ".", "client", ".", "create_base_path_mapping", "(", "domainName", "=", "domain", ",", "basePath", "=", "self", ".", "_format_base_path", "(", "self", ".", "trigger_settings", "[", "'api_name'", "]", ")", ",", "restApiId", "=", "self", ".", "api_id", ",", "stage", "=", "self", ".", "env", ",", ")", "response_action", "=", "'API mapping added.'", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "error_code", "=", "error", ".", "response", "[", "'Error'", "]", "[", "'Code'", "]", "if", "error_code", "==", "'ConflictException'", ":", "response_action", "=", "'API mapping already exist.'", "else", ":", "response_action", "=", "'Unknown error: {0}'", ".", "format", "(", "error_code", ")", "self", ".", "log", ".", "debug", "(", "'Provider response: %s'", ",", "response_provider", ")", "self", ".", "log", ".", "info", "(", "response_action", ")", "return", "response_provider" ]
Create a cname for the API deployment.
[ "Create", "a", "cname", "for", "the", "API", "deployment", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L189-L210
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.generate_uris
def generate_uris(self): """Generate several lambda uris.""" lambda_arn = "arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}".format(self.region, self.account_id, self.api_id, self.trigger_settings['method'], self.trigger_settings['resource']) lambda_uri = ("arn:aws:apigateway:{0}:lambda:path/{1}/functions/" "arn:aws:lambda:{0}:{2}:function:{3}/invocations").format(self.region, self.api_version, self.account_id, self.app_name) api_dns = "https://{0}.execute-api.{1}.amazonaws.com/{2}".format(self.api_id, self.region, self.env) uri_dict = {'lambda_arn': lambda_arn, 'lambda_uri': lambda_uri, 'api_dns': api_dns} return uri_dict
python
def generate_uris(self): """Generate several lambda uris.""" lambda_arn = "arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}".format(self.region, self.account_id, self.api_id, self.trigger_settings['method'], self.trigger_settings['resource']) lambda_uri = ("arn:aws:apigateway:{0}:lambda:path/{1}/functions/" "arn:aws:lambda:{0}:{2}:function:{3}/invocations").format(self.region, self.api_version, self.account_id, self.app_name) api_dns = "https://{0}.execute-api.{1}.amazonaws.com/{2}".format(self.api_id, self.region, self.env) uri_dict = {'lambda_arn': lambda_arn, 'lambda_uri': lambda_uri, 'api_dns': api_dns} return uri_dict
[ "def", "generate_uris", "(", "self", ")", ":", "lambda_arn", "=", "\"arn:aws:execute-api:{0}:{1}:{2}/*/{3}/{4}\"", ".", "format", "(", "self", ".", "region", ",", "self", ".", "account_id", ",", "self", ".", "api_id", ",", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "self", ".", "trigger_settings", "[", "'resource'", "]", ")", "lambda_uri", "=", "(", "\"arn:aws:apigateway:{0}:lambda:path/{1}/functions/\"", "\"arn:aws:lambda:{0}:{2}:function:{3}/invocations\"", ")", ".", "format", "(", "self", ".", "region", ",", "self", ".", "api_version", ",", "self", ".", "account_id", ",", "self", ".", "app_name", ")", "api_dns", "=", "\"https://{0}.execute-api.{1}.amazonaws.com/{2}\"", ".", "format", "(", "self", ".", "api_id", ",", "self", ".", "region", ",", "self", ".", "env", ")", "uri_dict", "=", "{", "'lambda_arn'", ":", "lambda_arn", ",", "'lambda_uri'", ":", "lambda_uri", ",", "'api_dns'", ":", "api_dns", "}", "return", "uri_dict" ]
Generate several lambda uris.
[ "Generate", "several", "lambda", "uris", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L212-L225
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.create_api
def create_api(self): """Create the REST API.""" created_api = self.client.create_rest_api(name=self.trigger_settings.get('api_name', self.app_name)) api_id = created_api['id'] self.log.info("Successfully created API") return api_id
python
def create_api(self): """Create the REST API.""" created_api = self.client.create_rest_api(name=self.trigger_settings.get('api_name', self.app_name)) api_id = created_api['id'] self.log.info("Successfully created API") return api_id
[ "def", "create_api", "(", "self", ")", ":", "created_api", "=", "self", ".", "client", ".", "create_rest_api", "(", "name", "=", "self", ".", "trigger_settings", ".", "get", "(", "'api_name'", ",", "self", ".", "app_name", ")", ")", "api_id", "=", "created_api", "[", "'id'", "]", "self", ".", "log", ".", "info", "(", "\"Successfully created API\"", ")", "return", "api_id" ]
Create the REST API.
[ "Create", "the", "REST", "API", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L227-L232
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.create_resource
def create_resource(self, parent_id=""): """Create the specified resource. Args: parent_id (str): The resource ID of the parent resource in API Gateway """ resource_name = self.trigger_settings.get('resource', '') resource_name = resource_name.replace('/', '') if not self.resource_id: created_resource = self.client.create_resource( restApiId=self.api_id, parentId=parent_id, pathPart=resource_name) self.resource_id = created_resource['id'] self.log.info("Successfully created resource") else: self.log.info("Resource already exists. To update resource please delete existing resource: %s", resource_name)
python
def create_resource(self, parent_id=""): """Create the specified resource. Args: parent_id (str): The resource ID of the parent resource in API Gateway """ resource_name = self.trigger_settings.get('resource', '') resource_name = resource_name.replace('/', '') if not self.resource_id: created_resource = self.client.create_resource( restApiId=self.api_id, parentId=parent_id, pathPart=resource_name) self.resource_id = created_resource['id'] self.log.info("Successfully created resource") else: self.log.info("Resource already exists. To update resource please delete existing resource: %s", resource_name)
[ "def", "create_resource", "(", "self", ",", "parent_id", "=", "\"\"", ")", ":", "resource_name", "=", "self", ".", "trigger_settings", ".", "get", "(", "'resource'", ",", "''", ")", "resource_name", "=", "resource_name", ".", "replace", "(", "'/'", ",", "''", ")", "if", "not", "self", ".", "resource_id", ":", "created_resource", "=", "self", ".", "client", ".", "create_resource", "(", "restApiId", "=", "self", ".", "api_id", ",", "parentId", "=", "parent_id", ",", "pathPart", "=", "resource_name", ")", "self", ".", "resource_id", "=", "created_resource", "[", "'id'", "]", "self", ".", "log", ".", "info", "(", "\"Successfully created resource\"", ")", "else", ":", "self", ".", "log", ".", "info", "(", "\"Resource already exists. To update resource please delete existing resource: %s\"", ",", "resource_name", ")" ]
Create the specified resource. Args: parent_id (str): The resource ID of the parent resource in API Gateway
[ "Create", "the", "specified", "resource", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L234-L249
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.attach_method
def attach_method(self, resource_id): """Attach the defined method.""" try: _response = self.client.put_method( restApiId=self.api_id, resourceId=resource_id, httpMethod=self.trigger_settings['method'], authorizationType="NONE", apiKeyRequired=False, ) self.log.debug('Response for resource (%s) push authorization: %s', resource_id, _response) _response = self.client.put_method_response( restApiId=self.api_id, resourceId=resource_id, httpMethod=self.trigger_settings['method'], statusCode='200') self.log.debug('Response for resource (%s) no authorization: %s', resource_id, _response) self.log.info("Successfully attached method: %s", self.trigger_settings['method']) except botocore.exceptions.ClientError: self.log.info("Method %s already exists", self.trigger_settings['method'])
python
def attach_method(self, resource_id): """Attach the defined method.""" try: _response = self.client.put_method( restApiId=self.api_id, resourceId=resource_id, httpMethod=self.trigger_settings['method'], authorizationType="NONE", apiKeyRequired=False, ) self.log.debug('Response for resource (%s) push authorization: %s', resource_id, _response) _response = self.client.put_method_response( restApiId=self.api_id, resourceId=resource_id, httpMethod=self.trigger_settings['method'], statusCode='200') self.log.debug('Response for resource (%s) no authorization: %s', resource_id, _response) self.log.info("Successfully attached method: %s", self.trigger_settings['method']) except botocore.exceptions.ClientError: self.log.info("Method %s already exists", self.trigger_settings['method'])
[ "def", "attach_method", "(", "self", ",", "resource_id", ")", ":", "try", ":", "_response", "=", "self", ".", "client", ".", "put_method", "(", "restApiId", "=", "self", ".", "api_id", ",", "resourceId", "=", "resource_id", ",", "httpMethod", "=", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "authorizationType", "=", "\"NONE\"", ",", "apiKeyRequired", "=", "False", ",", ")", "self", ".", "log", ".", "debug", "(", "'Response for resource (%s) push authorization: %s'", ",", "resource_id", ",", "_response", ")", "_response", "=", "self", ".", "client", ".", "put_method_response", "(", "restApiId", "=", "self", ".", "api_id", ",", "resourceId", "=", "resource_id", ",", "httpMethod", "=", "self", ".", "trigger_settings", "[", "'method'", "]", ",", "statusCode", "=", "'200'", ")", "self", ".", "log", ".", "debug", "(", "'Response for resource (%s) no authorization: %s'", ",", "resource_id", ",", "_response", ")", "self", ".", "log", ".", "info", "(", "\"Successfully attached method: %s\"", ",", "self", ".", "trigger_settings", "[", "'method'", "]", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", ":", "self", ".", "log", ".", "info", "(", "\"Method %s already exists\"", ",", "self", ".", "trigger_settings", "[", "'method'", "]", ")" ]
Attach the defined method.
[ "Attach", "the", "defined", "method", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L251-L270
train
foremast/foremast
src/foremast/awslambda/api_gateway_event/api_gateway_event.py
APIGateway.setup_lambda_api
def setup_lambda_api(self): """A wrapper for all the steps needed to setup the integration.""" self.create_resource(self.parent_id) self.attach_method(self.resource_id) self.add_lambda_integration() self.add_permission() self.create_api_deployment() self.create_api_key() self.update_api_mappings()
python
def setup_lambda_api(self): """A wrapper for all the steps needed to setup the integration.""" self.create_resource(self.parent_id) self.attach_method(self.resource_id) self.add_lambda_integration() self.add_permission() self.create_api_deployment() self.create_api_key() self.update_api_mappings()
[ "def", "setup_lambda_api", "(", "self", ")", ":", "self", ".", "create_resource", "(", "self", ".", "parent_id", ")", "self", ".", "attach_method", "(", "self", ".", "resource_id", ")", "self", ".", "add_lambda_integration", "(", ")", "self", ".", "add_permission", "(", ")", "self", ".", "create_api_deployment", "(", ")", "self", ".", "create_api_key", "(", ")", "self", ".", "update_api_mappings", "(", ")" ]
A wrapper for all the steps needed to setup the integration.
[ "A", "wrapper", "for", "all", "the", "steps", "needed", "to", "setup", "the", "integration", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/awslambda/api_gateway_event/api_gateway_event.py#L272-L280
train
foremast/foremast
src/foremast/dns/__main__.py
main
def main(): """Run newer stuffs.""" logging.basicConfig(format=LOGGING_FORMAT) log = logging.getLogger(__name__) parser = argparse.ArgumentParser() add_debug(parser) add_app(parser) add_env(parser) add_region(parser) add_properties(parser) parser.add_argument("--elb-subnet", help="Subnetnet type, e.g. external, internal", required=True) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) log.debug('Parsed arguments: %s', args) spinnakerapps = SpinnakerDns( app=args.app, env=args.env, region=args.region, prop_path=args.properties, elb_subnet=args.elb_subnet) spinnakerapps.create_elb_dns()
python
def main(): """Run newer stuffs.""" logging.basicConfig(format=LOGGING_FORMAT) log = logging.getLogger(__name__) parser = argparse.ArgumentParser() add_debug(parser) add_app(parser) add_env(parser) add_region(parser) add_properties(parser) parser.add_argument("--elb-subnet", help="Subnetnet type, e.g. external, internal", required=True) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) log.debug('Parsed arguments: %s', args) spinnakerapps = SpinnakerDns( app=args.app, env=args.env, region=args.region, prop_path=args.properties, elb_subnet=args.elb_subnet) spinnakerapps.create_elb_dns()
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "log", "=", "logging", ".", "getLogger", "(", "__name__", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_env", "(", "parser", ")", "add_region", "(", "parser", ")", "add_properties", "(", "parser", ")", "parser", ".", "add_argument", "(", "\"--elb-subnet\"", ",", "help", "=", "\"Subnetnet type, e.g. external, internal\"", ",", "required", "=", "True", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "log", ".", "debug", "(", "'Parsed arguments: %s'", ",", "args", ")", "spinnakerapps", "=", "SpinnakerDns", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ",", "elb_subnet", "=", "args", ".", "elb_subnet", ")", "spinnakerapps", ".", "create_elb_dns", "(", ")" ]
Run newer stuffs.
[ "Run", "newer", "stuffs", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/dns/__main__.py#L28-L48
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup._validate_cidr
def _validate_cidr(self, rule): """Validate the cidr block in a rule. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupCreationFailed: CIDR definition is invalid or the network range is too wide. """ try: network = ipaddress.IPv4Network(rule['app']) except (ipaddress.NetmaskValueError, ValueError) as error: raise SpinnakerSecurityGroupCreationFailed(error) self.log.debug('Validating CIDR: %s', network.exploded) return True
python
def _validate_cidr(self, rule): """Validate the cidr block in a rule. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupCreationFailed: CIDR definition is invalid or the network range is too wide. """ try: network = ipaddress.IPv4Network(rule['app']) except (ipaddress.NetmaskValueError, ValueError) as error: raise SpinnakerSecurityGroupCreationFailed(error) self.log.debug('Validating CIDR: %s', network.exploded) return True
[ "def", "_validate_cidr", "(", "self", ",", "rule", ")", ":", "try", ":", "network", "=", "ipaddress", ".", "IPv4Network", "(", "rule", "[", "'app'", "]", ")", "except", "(", "ipaddress", ".", "NetmaskValueError", ",", "ValueError", ")", "as", "error", ":", "raise", "SpinnakerSecurityGroupCreationFailed", "(", "error", ")", "self", ".", "log", ".", "debug", "(", "'Validating CIDR: %s'", ",", "network", ".", "exploded", ")", "return", "True" ]
Validate the cidr block in a rule. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupCreationFailed: CIDR definition is invalid or the network range is too wide.
[ "Validate", "the", "cidr", "block", "in", "a", "rule", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L77-L94
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup._process_rules
def _process_rules(self, rules): """Process rules into cidr and non-cidr lists. Args: rules (list): Allowed Security Group ports and protocols. Returns: (list, list): Security Group reference rules and custom CIDR rules. """ cidr = [] non_cidr = [] for rule in rules: if '.' in rule['app']: self.log.debug('Custom CIDR rule: %s', rule) self._validate_cidr(rule) cidr.append(rule) else: self.log.debug('SG reference rule: %s', rule) non_cidr.append(rule) self.log.debug('Custom CIDR rules: %s', cidr) self.log.debug('SG reference rules: %s', non_cidr) return non_cidr, cidr
python
def _process_rules(self, rules): """Process rules into cidr and non-cidr lists. Args: rules (list): Allowed Security Group ports and protocols. Returns: (list, list): Security Group reference rules and custom CIDR rules. """ cidr = [] non_cidr = [] for rule in rules: if '.' in rule['app']: self.log.debug('Custom CIDR rule: %s', rule) self._validate_cidr(rule) cidr.append(rule) else: self.log.debug('SG reference rule: %s', rule) non_cidr.append(rule) self.log.debug('Custom CIDR rules: %s', cidr) self.log.debug('SG reference rules: %s', non_cidr) return non_cidr, cidr
[ "def", "_process_rules", "(", "self", ",", "rules", ")", ":", "cidr", "=", "[", "]", "non_cidr", "=", "[", "]", "for", "rule", "in", "rules", ":", "if", "'.'", "in", "rule", "[", "'app'", "]", ":", "self", ".", "log", ".", "debug", "(", "'Custom CIDR rule: %s'", ",", "rule", ")", "self", ".", "_validate_cidr", "(", "rule", ")", "cidr", ".", "append", "(", "rule", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "'SG reference rule: %s'", ",", "rule", ")", "non_cidr", ".", "append", "(", "rule", ")", "self", ".", "log", ".", "debug", "(", "'Custom CIDR rules: %s'", ",", "cidr", ")", "self", ".", "log", ".", "debug", "(", "'SG reference rules: %s'", ",", "non_cidr", ")", "return", "non_cidr", ",", "cidr" ]
Process rules into cidr and non-cidr lists. Args: rules (list): Allowed Security Group ports and protocols. Returns: (list, list): Security Group reference rules and custom CIDR rules.
[ "Process", "rules", "into", "cidr", "and", "non", "-", "cidr", "lists", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L96-L119
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.add_tags
def add_tags(self): """Add tags to security group. Returns: True: Upon successful completion. """ session = boto3.session.Session(profile_name=self.env, region_name=self.region) resource = session.resource('ec2') group_id = get_security_group_id(self.app_name, self.env, self.region) security_group = resource.SecurityGroup(group_id) try: tag = security_group.create_tags( DryRun=False, Tags=[{ 'Key': 'app_group', 'Value': self.group }, { 'Key': 'app_name', 'Value': self.app_name }]) self.log.debug('Security group has been tagged: %s', tag) except botocore.exceptions.ClientError as error: self.log.warning(error) return True
python
def add_tags(self): """Add tags to security group. Returns: True: Upon successful completion. """ session = boto3.session.Session(profile_name=self.env, region_name=self.region) resource = session.resource('ec2') group_id = get_security_group_id(self.app_name, self.env, self.region) security_group = resource.SecurityGroup(group_id) try: tag = security_group.create_tags( DryRun=False, Tags=[{ 'Key': 'app_group', 'Value': self.group }, { 'Key': 'app_name', 'Value': self.app_name }]) self.log.debug('Security group has been tagged: %s', tag) except botocore.exceptions.ClientError as error: self.log.warning(error) return True
[ "def", "add_tags", "(", "self", ")", ":", "session", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "resource", "=", "session", ".", "resource", "(", "'ec2'", ")", "group_id", "=", "get_security_group_id", "(", "self", ".", "app_name", ",", "self", ".", "env", ",", "self", ".", "region", ")", "security_group", "=", "resource", ".", "SecurityGroup", "(", "group_id", ")", "try", ":", "tag", "=", "security_group", ".", "create_tags", "(", "DryRun", "=", "False", ",", "Tags", "=", "[", "{", "'Key'", ":", "'app_group'", ",", "'Value'", ":", "self", ".", "group", "}", ",", "{", "'Key'", ":", "'app_name'", ",", "'Value'", ":", "self", ".", "app_name", "}", "]", ")", "self", ".", "log", ".", "debug", "(", "'Security group has been tagged: %s'", ",", "tag", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "self", ".", "log", ".", "warning", "(", "error", ")", "return", "True" ]
Add tags to security group. Returns: True: Upon successful completion.
[ "Add", "tags", "to", "security", "group", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L121-L146
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.add_cidr_rules
def add_cidr_rules(self, rules): """Add cidr rules to security group via boto. Args: rules (list): Allowed Security Group ports and protocols. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupError: boto3 call failed to add CIDR block to Security Group. """ session = boto3.session.Session(profile_name=self.env, region_name=self.region) client = session.client('ec2') group_id = get_security_group_id(self.app_name, self.env, self.region) for rule in rules: data = { 'DryRun': False, 'GroupId': group_id, 'IpPermissions': [{ 'IpProtocol': rule['protocol'], 'FromPort': rule['start_port'], 'ToPort': rule['end_port'], 'IpRanges': [{ 'CidrIp': rule['app'] }] }] } self.log.debug('Security Group rule: %s', data) try: client.authorize_security_group_ingress(**data) except botocore.exceptions.ClientError as error: if 'InvalidPermission.Duplicate' in str(error): self.log.debug('Duplicate rule exist, that is OK.') else: msg = 'Unable to add cidr rules to {}'.format(rule.get('app')) self.log.error(msg) raise SpinnakerSecurityGroupError(msg) return True
python
def add_cidr_rules(self, rules): """Add cidr rules to security group via boto. Args: rules (list): Allowed Security Group ports and protocols. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupError: boto3 call failed to add CIDR block to Security Group. """ session = boto3.session.Session(profile_name=self.env, region_name=self.region) client = session.client('ec2') group_id = get_security_group_id(self.app_name, self.env, self.region) for rule in rules: data = { 'DryRun': False, 'GroupId': group_id, 'IpPermissions': [{ 'IpProtocol': rule['protocol'], 'FromPort': rule['start_port'], 'ToPort': rule['end_port'], 'IpRanges': [{ 'CidrIp': rule['app'] }] }] } self.log.debug('Security Group rule: %s', data) try: client.authorize_security_group_ingress(**data) except botocore.exceptions.ClientError as error: if 'InvalidPermission.Duplicate' in str(error): self.log.debug('Duplicate rule exist, that is OK.') else: msg = 'Unable to add cidr rules to {}'.format(rule.get('app')) self.log.error(msg) raise SpinnakerSecurityGroupError(msg) return True
[ "def", "add_cidr_rules", "(", "self", ",", "rules", ")", ":", "session", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "client", "=", "session", ".", "client", "(", "'ec2'", ")", "group_id", "=", "get_security_group_id", "(", "self", ".", "app_name", ",", "self", ".", "env", ",", "self", ".", "region", ")", "for", "rule", "in", "rules", ":", "data", "=", "{", "'DryRun'", ":", "False", ",", "'GroupId'", ":", "group_id", ",", "'IpPermissions'", ":", "[", "{", "'IpProtocol'", ":", "rule", "[", "'protocol'", "]", ",", "'FromPort'", ":", "rule", "[", "'start_port'", "]", ",", "'ToPort'", ":", "rule", "[", "'end_port'", "]", ",", "'IpRanges'", ":", "[", "{", "'CidrIp'", ":", "rule", "[", "'app'", "]", "}", "]", "}", "]", "}", "self", ".", "log", ".", "debug", "(", "'Security Group rule: %s'", ",", "data", ")", "try", ":", "client", ".", "authorize_security_group_ingress", "(", "*", "*", "data", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "if", "'InvalidPermission.Duplicate'", "in", "str", "(", "error", ")", ":", "self", ".", "log", ".", "debug", "(", "'Duplicate rule exist, that is OK.'", ")", "else", ":", "msg", "=", "'Unable to add cidr rules to {}'", ".", "format", "(", "rule", ".", "get", "(", "'app'", ")", ")", "self", ".", "log", ".", "error", "(", "msg", ")", "raise", "SpinnakerSecurityGroupError", "(", "msg", ")", "return", "True" ]
Add cidr rules to security group via boto. Args: rules (list): Allowed Security Group ports and protocols. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupError: boto3 call failed to add CIDR block to Security Group.
[ "Add", "cidr", "rules", "to", "security", "group", "via", "boto", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L148-L193
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.update_default_rules
def update_default_rules(self): """Concatinate application and global security group rules.""" app_ingress = self.properties['security_group']['ingress'] ingress = conservative_merger.merge(DEFAULT_SECURITYGROUP_RULES, app_ingress) resolved_ingress = self.resolve_self_references(ingress) self.log.info('Updated default rules:\n%s', ingress) return resolved_ingress
python
def update_default_rules(self): """Concatinate application and global security group rules.""" app_ingress = self.properties['security_group']['ingress'] ingress = conservative_merger.merge(DEFAULT_SECURITYGROUP_RULES, app_ingress) resolved_ingress = self.resolve_self_references(ingress) self.log.info('Updated default rules:\n%s', ingress) return resolved_ingress
[ "def", "update_default_rules", "(", "self", ")", ":", "app_ingress", "=", "self", ".", "properties", "[", "'security_group'", "]", "[", "'ingress'", "]", "ingress", "=", "conservative_merger", ".", "merge", "(", "DEFAULT_SECURITYGROUP_RULES", ",", "app_ingress", ")", "resolved_ingress", "=", "self", ".", "resolve_self_references", "(", "ingress", ")", "self", ".", "log", ".", "info", "(", "'Updated default rules:\\n%s'", ",", "ingress", ")", "return", "resolved_ingress" ]
Concatinate application and global security group rules.
[ "Concatinate", "application", "and", "global", "security", "group", "rules", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L202-L208
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup._create_security_group
def _create_security_group(self, ingress): """Send a POST to spinnaker to create a new security group. Returns: boolean: True if created successfully """ template_kwargs = { 'app': self.app_name, 'env': self.env, 'region': self.region, 'vpc': get_vpc_id(self.env, self.region), 'description': self.properties['security_group']['description'], 'ingress': ingress, } secgroup_json = get_template( template_file='infrastructure/securitygroup_data.json.j2', formats=self.generated, **template_kwargs) wait_for_task(secgroup_json) return True
python
def _create_security_group(self, ingress): """Send a POST to spinnaker to create a new security group. Returns: boolean: True if created successfully """ template_kwargs = { 'app': self.app_name, 'env': self.env, 'region': self.region, 'vpc': get_vpc_id(self.env, self.region), 'description': self.properties['security_group']['description'], 'ingress': ingress, } secgroup_json = get_template( template_file='infrastructure/securitygroup_data.json.j2', formats=self.generated, **template_kwargs) wait_for_task(secgroup_json) return True
[ "def", "_create_security_group", "(", "self", ",", "ingress", ")", ":", "template_kwargs", "=", "{", "'app'", ":", "self", ".", "app_name", ",", "'env'", ":", "self", ".", "env", ",", "'region'", ":", "self", ".", "region", ",", "'vpc'", ":", "get_vpc_id", "(", "self", ".", "env", ",", "self", ".", "region", ")", ",", "'description'", ":", "self", ".", "properties", "[", "'security_group'", "]", "[", "'description'", "]", ",", "'ingress'", ":", "ingress", ",", "}", "secgroup_json", "=", "get_template", "(", "template_file", "=", "'infrastructure/securitygroup_data.json.j2'", ",", "formats", "=", "self", ".", "generated", ",", "*", "*", "template_kwargs", ")", "wait_for_task", "(", "secgroup_json", ")", "return", "True" ]
Send a POST to spinnaker to create a new security group. Returns: boolean: True if created successfully
[ "Send", "a", "POST", "to", "spinnaker", "to", "create", "a", "new", "security", "group", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L210-L230
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.create_security_group
def create_security_group(self): # noqa """Send a POST to spinnaker to create or update a security group. Returns: boolean: True if created successfully Raises: ForemastConfigurationFileError: Missing environment configuration or misconfigured Security Group definition. """ ingress_rules = [] try: security_id = get_security_group_id(name=self.app_name, env=self.env, region=self.region) except (SpinnakerSecurityGroupError, AssertionError): self._create_security_group(ingress_rules) else: self.log.debug('Security Group ID %s found for %s.', security_id, self.app_name) try: ingress = self.update_default_rules() except KeyError: msg = 'Possible missing configuration for "{0}".'.format(self.env) self.log.error(msg) raise ForemastConfigurationFileError(msg) for app in ingress: rules = ingress[app] # Essentially we have two formats: simple, advanced # - simple: is just a list of ports # - advanced: selects ports ranges and protocols for rule in rules: ingress_rule = self.create_ingress_rule(app, rule) ingress_rules.append(ingress_rule) ingress_rules_no_cidr, ingress_rules_cidr = self._process_rules(ingress_rules) self._create_security_group(ingress_rules_no_cidr) # Append cidr rules self.add_cidr_rules(ingress_rules_cidr) # Tag security group self.add_tags() self.log.info('Successfully created %s security group', self.app_name) return True
python
def create_security_group(self): # noqa """Send a POST to spinnaker to create or update a security group. Returns: boolean: True if created successfully Raises: ForemastConfigurationFileError: Missing environment configuration or misconfigured Security Group definition. """ ingress_rules = [] try: security_id = get_security_group_id(name=self.app_name, env=self.env, region=self.region) except (SpinnakerSecurityGroupError, AssertionError): self._create_security_group(ingress_rules) else: self.log.debug('Security Group ID %s found for %s.', security_id, self.app_name) try: ingress = self.update_default_rules() except KeyError: msg = 'Possible missing configuration for "{0}".'.format(self.env) self.log.error(msg) raise ForemastConfigurationFileError(msg) for app in ingress: rules = ingress[app] # Essentially we have two formats: simple, advanced # - simple: is just a list of ports # - advanced: selects ports ranges and protocols for rule in rules: ingress_rule = self.create_ingress_rule(app, rule) ingress_rules.append(ingress_rule) ingress_rules_no_cidr, ingress_rules_cidr = self._process_rules(ingress_rules) self._create_security_group(ingress_rules_no_cidr) # Append cidr rules self.add_cidr_rules(ingress_rules_cidr) # Tag security group self.add_tags() self.log.info('Successfully created %s security group', self.app_name) return True
[ "def", "create_security_group", "(", "self", ")", ":", "# noqa", "ingress_rules", "=", "[", "]", "try", ":", "security_id", "=", "get_security_group_id", "(", "name", "=", "self", ".", "app_name", ",", "env", "=", "self", ".", "env", ",", "region", "=", "self", ".", "region", ")", "except", "(", "SpinnakerSecurityGroupError", ",", "AssertionError", ")", ":", "self", ".", "_create_security_group", "(", "ingress_rules", ")", "else", ":", "self", ".", "log", ".", "debug", "(", "'Security Group ID %s found for %s.'", ",", "security_id", ",", "self", ".", "app_name", ")", "try", ":", "ingress", "=", "self", ".", "update_default_rules", "(", ")", "except", "KeyError", ":", "msg", "=", "'Possible missing configuration for \"{0}\".'", ".", "format", "(", "self", ".", "env", ")", "self", ".", "log", ".", "error", "(", "msg", ")", "raise", "ForemastConfigurationFileError", "(", "msg", ")", "for", "app", "in", "ingress", ":", "rules", "=", "ingress", "[", "app", "]", "# Essentially we have two formats: simple, advanced", "# - simple: is just a list of ports", "# - advanced: selects ports ranges and protocols", "for", "rule", "in", "rules", ":", "ingress_rule", "=", "self", ".", "create_ingress_rule", "(", "app", ",", "rule", ")", "ingress_rules", ".", "append", "(", "ingress_rule", ")", "ingress_rules_no_cidr", ",", "ingress_rules_cidr", "=", "self", ".", "_process_rules", "(", "ingress_rules", ")", "self", ".", "_create_security_group", "(", "ingress_rules_no_cidr", ")", "# Append cidr rules", "self", ".", "add_cidr_rules", "(", "ingress_rules_cidr", ")", "# Tag security group", "self", ".", "add_tags", "(", ")", "self", ".", "log", ".", "info", "(", "'Successfully created %s security group'", ",", "self", ".", "app_name", ")", "return", "True" ]
Send a POST to spinnaker to create or update a security group. Returns: boolean: True if created successfully Raises: ForemastConfigurationFileError: Missing environment configuration or misconfigured Security Group definition.
[ "Send", "a", "POST", "to", "spinnaker", "to", "create", "or", "update", "a", "security", "group", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L232-L279
train
foremast/foremast
src/foremast/securitygroup/create_securitygroup.py
SpinnakerSecurityGroup.create_ingress_rule
def create_ingress_rule(self, app, rule): """Create a normalized ingress rule. Args: app (str): Application name rule (dict or int): Allowed Security Group ports and protocols. Returns: dict: Contains app, start_port, end_port, protocol, cross_account_env and cross_account_vpc_id """ if isinstance(rule, dict): # Advanced start_port = rule.get('start_port') end_port = rule.get('end_port') protocol = rule.get('protocol', 'tcp') requested_cross_account = rule.get('env', self.env) if self.env == requested_cross_account: # We are trying to use cross-account security group settings within the same account # We should not allow this. cross_account_env = None cross_account_vpc_id = None else: cross_account_env = requested_cross_account cross_account_vpc_id = get_vpc_id(cross_account_env, self.region) else: start_port = rule end_port = rule protocol = 'tcp' cross_account_env = None cross_account_vpc_id = None created_rule = { 'app': app, 'start_port': start_port, 'end_port': end_port, 'protocol': protocol, 'cross_account_env': cross_account_env, 'cross_account_vpc_id': cross_account_vpc_id } self.log.debug('Normalized ingress rule: %s', created_rule) return created_rule
python
def create_ingress_rule(self, app, rule): """Create a normalized ingress rule. Args: app (str): Application name rule (dict or int): Allowed Security Group ports and protocols. Returns: dict: Contains app, start_port, end_port, protocol, cross_account_env and cross_account_vpc_id """ if isinstance(rule, dict): # Advanced start_port = rule.get('start_port') end_port = rule.get('end_port') protocol = rule.get('protocol', 'tcp') requested_cross_account = rule.get('env', self.env) if self.env == requested_cross_account: # We are trying to use cross-account security group settings within the same account # We should not allow this. cross_account_env = None cross_account_vpc_id = None else: cross_account_env = requested_cross_account cross_account_vpc_id = get_vpc_id(cross_account_env, self.region) else: start_port = rule end_port = rule protocol = 'tcp' cross_account_env = None cross_account_vpc_id = None created_rule = { 'app': app, 'start_port': start_port, 'end_port': end_port, 'protocol': protocol, 'cross_account_env': cross_account_env, 'cross_account_vpc_id': cross_account_vpc_id } self.log.debug('Normalized ingress rule: %s', created_rule) return created_rule
[ "def", "create_ingress_rule", "(", "self", ",", "app", ",", "rule", ")", ":", "if", "isinstance", "(", "rule", ",", "dict", ")", ":", "# Advanced", "start_port", "=", "rule", ".", "get", "(", "'start_port'", ")", "end_port", "=", "rule", ".", "get", "(", "'end_port'", ")", "protocol", "=", "rule", ".", "get", "(", "'protocol'", ",", "'tcp'", ")", "requested_cross_account", "=", "rule", ".", "get", "(", "'env'", ",", "self", ".", "env", ")", "if", "self", ".", "env", "==", "requested_cross_account", ":", "# We are trying to use cross-account security group settings within the same account", "# We should not allow this.", "cross_account_env", "=", "None", "cross_account_vpc_id", "=", "None", "else", ":", "cross_account_env", "=", "requested_cross_account", "cross_account_vpc_id", "=", "get_vpc_id", "(", "cross_account_env", ",", "self", ".", "region", ")", "else", ":", "start_port", "=", "rule", "end_port", "=", "rule", "protocol", "=", "'tcp'", "cross_account_env", "=", "None", "cross_account_vpc_id", "=", "None", "created_rule", "=", "{", "'app'", ":", "app", ",", "'start_port'", ":", "start_port", ",", "'end_port'", ":", "end_port", ",", "'protocol'", ":", "protocol", ",", "'cross_account_env'", ":", "cross_account_env", ",", "'cross_account_vpc_id'", ":", "cross_account_vpc_id", "}", "self", ".", "log", ".", "debug", "(", "'Normalized ingress rule: %s'", ",", "created_rule", ")", "return", "created_rule" ]
Create a normalized ingress rule. Args: app (str): Application name rule (dict or int): Allowed Security Group ports and protocols. Returns: dict: Contains app, start_port, end_port, protocol, cross_account_env and cross_account_vpc_id
[ "Create", "a", "normalized", "ingress", "rule", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/securitygroup/create_securitygroup.py#L281-L324
train
foremast/foremast
src/foremast/utils/awslambda.py
get_lambda_arn
def get_lambda_arn(app, account, region): """Get lambda ARN. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda function """ session = boto3.Session(profile_name=account, region_name=region) lambda_client = session.client('lambda') lambda_arn = None paginator = lambda_client.get_paginator('list_functions') for lambda_functions in paginator.paginate(): for lambda_function in lambda_functions['Functions']: if lambda_function['FunctionName'] == app: lambda_arn = lambda_function['FunctionArn'] LOG.debug("Lambda ARN for lambda function %s is %s.", app, lambda_arn) break if lambda_arn: break if not lambda_arn: LOG.fatal('Lambda function with name %s not found in %s %s', app, account, region) raise LambdaFunctionDoesNotExist( 'Lambda function with name {0} not found in {1} {2}'.format(app, account, region)) return lambda_arn
python
def get_lambda_arn(app, account, region): """Get lambda ARN. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda function """ session = boto3.Session(profile_name=account, region_name=region) lambda_client = session.client('lambda') lambda_arn = None paginator = lambda_client.get_paginator('list_functions') for lambda_functions in paginator.paginate(): for lambda_function in lambda_functions['Functions']: if lambda_function['FunctionName'] == app: lambda_arn = lambda_function['FunctionArn'] LOG.debug("Lambda ARN for lambda function %s is %s.", app, lambda_arn) break if lambda_arn: break if not lambda_arn: LOG.fatal('Lambda function with name %s not found in %s %s', app, account, region) raise LambdaFunctionDoesNotExist( 'Lambda function with name {0} not found in {1} {2}'.format(app, account, region)) return lambda_arn
[ "def", "get_lambda_arn", "(", "app", ",", "account", ",", "region", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "account", ",", "region_name", "=", "region", ")", "lambda_client", "=", "session", ".", "client", "(", "'lambda'", ")", "lambda_arn", "=", "None", "paginator", "=", "lambda_client", ".", "get_paginator", "(", "'list_functions'", ")", "for", "lambda_functions", "in", "paginator", ".", "paginate", "(", ")", ":", "for", "lambda_function", "in", "lambda_functions", "[", "'Functions'", "]", ":", "if", "lambda_function", "[", "'FunctionName'", "]", "==", "app", ":", "lambda_arn", "=", "lambda_function", "[", "'FunctionArn'", "]", "LOG", ".", "debug", "(", "\"Lambda ARN for lambda function %s is %s.\"", ",", "app", ",", "lambda_arn", ")", "break", "if", "lambda_arn", ":", "break", "if", "not", "lambda_arn", ":", "LOG", ".", "fatal", "(", "'Lambda function with name %s not found in %s %s'", ",", "app", ",", "account", ",", "region", ")", "raise", "LambdaFunctionDoesNotExist", "(", "'Lambda function with name {0} not found in {1} {2}'", ".", "format", "(", "app", ",", "account", ",", "region", ")", ")", "return", "lambda_arn" ]
Get lambda ARN. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda function
[ "Get", "lambda", "ARN", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/awslambda.py#L28-L60
train
foremast/foremast
src/foremast/utils/awslambda.py
get_lambda_alias_arn
def get_lambda_alias_arn(app, account, region): """Get lambda alias ARN. Assumes that account name is equal to alias name. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda alias """ session = boto3.Session(profile_name=account, region_name=region) lambda_client = session.client('lambda') lambda_aliases = lambda_client.list_aliases(FunctionName=app) matched_alias = None for alias in lambda_aliases['Aliases']: if alias['Name'] == account: lambda_alias_arn = alias['AliasArn'] LOG.info('Found ARN for alias %s for function %s', account, app) matched_alias = lambda_alias_arn break else: fatal_message = 'Lambda alias {0} of function {1} not found'.format(account, app) LOG.fatal(fatal_message) raise LambdaAliasDoesNotExist(fatal_message) return matched_alias
python
def get_lambda_alias_arn(app, account, region): """Get lambda alias ARN. Assumes that account name is equal to alias name. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda alias """ session = boto3.Session(profile_name=account, region_name=region) lambda_client = session.client('lambda') lambda_aliases = lambda_client.list_aliases(FunctionName=app) matched_alias = None for alias in lambda_aliases['Aliases']: if alias['Name'] == account: lambda_alias_arn = alias['AliasArn'] LOG.info('Found ARN for alias %s for function %s', account, app) matched_alias = lambda_alias_arn break else: fatal_message = 'Lambda alias {0} of function {1} not found'.format(account, app) LOG.fatal(fatal_message) raise LambdaAliasDoesNotExist(fatal_message) return matched_alias
[ "def", "get_lambda_alias_arn", "(", "app", ",", "account", ",", "region", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "account", ",", "region_name", "=", "region", ")", "lambda_client", "=", "session", ".", "client", "(", "'lambda'", ")", "lambda_aliases", "=", "lambda_client", ".", "list_aliases", "(", "FunctionName", "=", "app", ")", "matched_alias", "=", "None", "for", "alias", "in", "lambda_aliases", "[", "'Aliases'", "]", ":", "if", "alias", "[", "'Name'", "]", "==", "account", ":", "lambda_alias_arn", "=", "alias", "[", "'AliasArn'", "]", "LOG", ".", "info", "(", "'Found ARN for alias %s for function %s'", ",", "account", ",", "app", ")", "matched_alias", "=", "lambda_alias_arn", "break", "else", ":", "fatal_message", "=", "'Lambda alias {0} of function {1} not found'", ".", "format", "(", "account", ",", "app", ")", "LOG", ".", "fatal", "(", "fatal_message", ")", "raise", "LambdaAliasDoesNotExist", "(", "fatal_message", ")", "return", "matched_alias" ]
Get lambda alias ARN. Assumes that account name is equal to alias name. Args: account (str): AWS account name. region (str): Region name, e.g. us-east-1 app (str): Lambda function name Returns: str: ARN for requested lambda alias
[ "Get", "lambda", "alias", "ARN", ".", "Assumes", "that", "account", "name", "is", "equal", "to", "alias", "name", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/awslambda.py#L63-L91
train
foremast/foremast
src/foremast/utils/awslambda.py
add_lambda_permissions
def add_lambda_permissions(function='', statement_id='', action='lambda:InvokeFunction', principal='', source_arn='', env='', region='us-east-1'): """Add permission to Lambda for the event trigger. Args: function (str): Lambda function name statement_id (str): IAM policy statement (principal) id action (str): Lambda action to allow principal (str): AWS principal to add permissions source_arn (str): ARN of the source of the event. Only needed for S3 env (str): Environment/account of function region (str): AWS region of function """ session = boto3.Session(profile_name=env, region_name=region) lambda_client = session.client('lambda') response_action = None prefixed_sid = FOREMAST_PREFIX + statement_id add_permissions_kwargs = { 'FunctionName': function, 'StatementId': prefixed_sid, 'Action': action, 'Principal': principal, } if source_arn: add_permissions_kwargs['SourceArn'] = source_arn try: lambda_client.add_permission(**add_permissions_kwargs) response_action = 'Add permission with Sid: {}'.format(prefixed_sid) except boto3.exceptions.botocore.exceptions.ClientError as error: LOG.debug('Add permission error: %s', error) response_action = "Did not add permissions" LOG.debug('Related StatementId (SID): %s', prefixed_sid) LOG.info(response_action)
python
def add_lambda_permissions(function='', statement_id='', action='lambda:InvokeFunction', principal='', source_arn='', env='', region='us-east-1'): """Add permission to Lambda for the event trigger. Args: function (str): Lambda function name statement_id (str): IAM policy statement (principal) id action (str): Lambda action to allow principal (str): AWS principal to add permissions source_arn (str): ARN of the source of the event. Only needed for S3 env (str): Environment/account of function region (str): AWS region of function """ session = boto3.Session(profile_name=env, region_name=region) lambda_client = session.client('lambda') response_action = None prefixed_sid = FOREMAST_PREFIX + statement_id add_permissions_kwargs = { 'FunctionName': function, 'StatementId': prefixed_sid, 'Action': action, 'Principal': principal, } if source_arn: add_permissions_kwargs['SourceArn'] = source_arn try: lambda_client.add_permission(**add_permissions_kwargs) response_action = 'Add permission with Sid: {}'.format(prefixed_sid) except boto3.exceptions.botocore.exceptions.ClientError as error: LOG.debug('Add permission error: %s', error) response_action = "Did not add permissions" LOG.debug('Related StatementId (SID): %s', prefixed_sid) LOG.info(response_action)
[ "def", "add_lambda_permissions", "(", "function", "=", "''", ",", "statement_id", "=", "''", ",", "action", "=", "'lambda:InvokeFunction'", ",", "principal", "=", "''", ",", "source_arn", "=", "''", ",", "env", "=", "''", ",", "region", "=", "'us-east-1'", ")", ":", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "env", ",", "region_name", "=", "region", ")", "lambda_client", "=", "session", ".", "client", "(", "'lambda'", ")", "response_action", "=", "None", "prefixed_sid", "=", "FOREMAST_PREFIX", "+", "statement_id", "add_permissions_kwargs", "=", "{", "'FunctionName'", ":", "function", ",", "'StatementId'", ":", "prefixed_sid", ",", "'Action'", ":", "action", ",", "'Principal'", ":", "principal", ",", "}", "if", "source_arn", ":", "add_permissions_kwargs", "[", "'SourceArn'", "]", "=", "source_arn", "try", ":", "lambda_client", ".", "add_permission", "(", "*", "*", "add_permissions_kwargs", ")", "response_action", "=", "'Add permission with Sid: {}'", ".", "format", "(", "prefixed_sid", ")", "except", "boto3", ".", "exceptions", ".", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "LOG", ".", "debug", "(", "'Add permission error: %s'", ",", "error", ")", "response_action", "=", "\"Did not add permissions\"", "LOG", ".", "debug", "(", "'Related StatementId (SID): %s'", ",", "prefixed_sid", ")", "LOG", ".", "info", "(", "response_action", ")" ]
Add permission to Lambda for the event trigger. Args: function (str): Lambda function name statement_id (str): IAM policy statement (principal) id action (str): Lambda action to allow principal (str): AWS principal to add permissions source_arn (str): ARN of the source of the event. Only needed for S3 env (str): Environment/account of function region (str): AWS region of function
[ "Add", "permission", "to", "Lambda", "for", "the", "event", "trigger", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/awslambda.py#L94-L135
train
foremast/foremast
src/foremast/iam/resource_action.py
resource_action
def resource_action(client, action='', log_format='item: %(key)s', **kwargs): """Call _action_ using boto3 _client_ with _kwargs_. This is meant for _action_ methods that will create or implicitely prove a given Resource exists. The _log_failure_ flag is available for methods that should always succeed, but will occasionally fail due to unknown AWS issues. Args: client (botocore.client.IAM): boto3 client object. action (str): Client method to call. log_format (str): Generic log message format, 'Added' or 'Found' will be prepended depending on the scenario. prefix (str): Prefix word to use in successful INFO message. **kwargs: Keyword arguments to pass to _action_ method. Returns: dict: boto3 response. """ result = None try: result = getattr(client, action)(**kwargs) LOG.info(log_format, kwargs) except botocore.exceptions.ClientError as error: error_code = error.response['Error']['Code'] if error_code == 'AccessDenied': LOG.fatal(error) raise elif error_code == 'EntityAlreadyExists': LOG.info(' '.join(('Found', log_format)), kwargs) else: LOG.fatal(error) return result
python
def resource_action(client, action='', log_format='item: %(key)s', **kwargs): """Call _action_ using boto3 _client_ with _kwargs_. This is meant for _action_ methods that will create or implicitely prove a given Resource exists. The _log_failure_ flag is available for methods that should always succeed, but will occasionally fail due to unknown AWS issues. Args: client (botocore.client.IAM): boto3 client object. action (str): Client method to call. log_format (str): Generic log message format, 'Added' or 'Found' will be prepended depending on the scenario. prefix (str): Prefix word to use in successful INFO message. **kwargs: Keyword arguments to pass to _action_ method. Returns: dict: boto3 response. """ result = None try: result = getattr(client, action)(**kwargs) LOG.info(log_format, kwargs) except botocore.exceptions.ClientError as error: error_code = error.response['Error']['Code'] if error_code == 'AccessDenied': LOG.fatal(error) raise elif error_code == 'EntityAlreadyExists': LOG.info(' '.join(('Found', log_format)), kwargs) else: LOG.fatal(error) return result
[ "def", "resource_action", "(", "client", ",", "action", "=", "''", ",", "log_format", "=", "'item: %(key)s'", ",", "*", "*", "kwargs", ")", ":", "result", "=", "None", "try", ":", "result", "=", "getattr", "(", "client", ",", "action", ")", "(", "*", "*", "kwargs", ")", "LOG", ".", "info", "(", "log_format", ",", "kwargs", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "error_code", "=", "error", ".", "response", "[", "'Error'", "]", "[", "'Code'", "]", "if", "error_code", "==", "'AccessDenied'", ":", "LOG", ".", "fatal", "(", "error", ")", "raise", "elif", "error_code", "==", "'EntityAlreadyExists'", ":", "LOG", ".", "info", "(", "' '", ".", "join", "(", "(", "'Found'", ",", "log_format", ")", ")", ",", "kwargs", ")", "else", ":", "LOG", ".", "fatal", "(", "error", ")", "return", "result" ]
Call _action_ using boto3 _client_ with _kwargs_. This is meant for _action_ methods that will create or implicitely prove a given Resource exists. The _log_failure_ flag is available for methods that should always succeed, but will occasionally fail due to unknown AWS issues. Args: client (botocore.client.IAM): boto3 client object. action (str): Client method to call. log_format (str): Generic log message format, 'Added' or 'Found' will be prepended depending on the scenario. prefix (str): Prefix word to use in successful INFO message. **kwargs: Keyword arguments to pass to _action_ method. Returns: dict: boto3 response.
[ "Call", "_action_", "using", "boto3", "_client_", "with", "_kwargs_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/iam/resource_action.py#L24-L59
train
foremast/foremast
src/foremast/elb/__main__.py
main
def main(): """Entry point for ELB creation""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description='Example with non-optional arguments') add_debug(parser) add_app(parser) add_env(parser) add_region(parser) add_properties(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) elb = SpinnakerELB(app=args.app, env=args.env, region=args.region, prop_path=args.properties) elb.create_elb()
python
def main(): """Entry point for ELB creation""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description='Example with non-optional arguments') add_debug(parser) add_app(parser) add_env(parser) add_region(parser) add_properties(parser) args = parser.parse_args() logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) elb = SpinnakerELB(app=args.app, env=args.env, region=args.region, prop_path=args.properties) elb.create_elb()
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Example with non-optional arguments'", ")", "add_debug", "(", "parser", ")", "add_app", "(", "parser", ")", "add_env", "(", "parser", ")", "add_region", "(", "parser", ")", "add_properties", "(", "parser", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "elb", "=", "SpinnakerELB", "(", "app", "=", "args", ".", "app", ",", "env", "=", "args", ".", "env", ",", "region", "=", "args", ".", "region", ",", "prop_path", "=", "args", ".", "properties", ")", "elb", ".", "create_elb", "(", ")" ]
Entry point for ELB creation
[ "Entry", "point", "for", "ELB", "creation" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/__main__.py#L30-L47
train
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.make_elb_json
def make_elb_json(self): """Render the JSON template with arguments. Returns: str: Rendered ELB template. """ env = self.env region = self.region elb_settings = self.properties['elb'] LOG.debug('Block ELB Settings:\n%s', pformat(elb_settings)) health_settings = elb_settings['health'] elb_subnet_purpose = elb_settings.get('subnet_purpose', 'internal') region_subnets = get_subnets(target='elb', purpose=elb_subnet_purpose, env=env, region=region) region_subnets.pop("subnet_ids", None) # CAVEAT: Setting the ELB to public, you must use a public subnet, # otherwise AWS complains about missing IGW on subnet. if elb_subnet_purpose == 'internal': is_internal = 'true' else: is_internal = 'false' target = elb_settings.get('target', 'HTTP:80/health') health = splay_health(target) listeners = format_listeners(elb_settings=elb_settings, env=self.env, region=region) idle_timeout = elb_settings.get('idle_timeout', None) access_log = elb_settings.get('access_log', {}) connection_draining_timeout = elb_settings.get('connection_draining_timeout', None) security_groups = DEFAULT_ELB_SECURITYGROUPS[env] security_groups.append(self.app) security_groups.extend(self.properties['security_group']['elb_extras']) security_groups = remove_duplicate_sg(security_groups) template_kwargs = { 'access_log': json.dumps(access_log), 'app_name': self.app, 'availability_zones': json.dumps(region_subnets), 'connection_draining_timeout': json.dumps(connection_draining_timeout), 'env': env, 'hc_string': target, 'health_interval': health_settings['interval'], 'health_path': health.path, 'health_port': health.port, 'health_protocol': health.proto, 'health_timeout': health_settings['timeout'], 'healthy_threshold': health_settings['threshold'], 'idle_timeout': json.dumps(idle_timeout), 'isInternal': is_internal, 'listeners': json.dumps(listeners), 'region_zones': json.dumps(region_subnets[region]), 'region': region, 'security_groups': json.dumps(security_groups), 'subnet_type': elb_subnet_purpose, 
'unhealthy_threshold': health_settings['unhealthy_threshold'], 'vpc_id': get_vpc_id(env, region), } rendered_template = get_template(template_file='infrastructure/elb_data.json.j2', **template_kwargs) return rendered_template
python
def make_elb_json(self): """Render the JSON template with arguments. Returns: str: Rendered ELB template. """ env = self.env region = self.region elb_settings = self.properties['elb'] LOG.debug('Block ELB Settings:\n%s', pformat(elb_settings)) health_settings = elb_settings['health'] elb_subnet_purpose = elb_settings.get('subnet_purpose', 'internal') region_subnets = get_subnets(target='elb', purpose=elb_subnet_purpose, env=env, region=region) region_subnets.pop("subnet_ids", None) # CAVEAT: Setting the ELB to public, you must use a public subnet, # otherwise AWS complains about missing IGW on subnet. if elb_subnet_purpose == 'internal': is_internal = 'true' else: is_internal = 'false' target = elb_settings.get('target', 'HTTP:80/health') health = splay_health(target) listeners = format_listeners(elb_settings=elb_settings, env=self.env, region=region) idle_timeout = elb_settings.get('idle_timeout', None) access_log = elb_settings.get('access_log', {}) connection_draining_timeout = elb_settings.get('connection_draining_timeout', None) security_groups = DEFAULT_ELB_SECURITYGROUPS[env] security_groups.append(self.app) security_groups.extend(self.properties['security_group']['elb_extras']) security_groups = remove_duplicate_sg(security_groups) template_kwargs = { 'access_log': json.dumps(access_log), 'app_name': self.app, 'availability_zones': json.dumps(region_subnets), 'connection_draining_timeout': json.dumps(connection_draining_timeout), 'env': env, 'hc_string': target, 'health_interval': health_settings['interval'], 'health_path': health.path, 'health_port': health.port, 'health_protocol': health.proto, 'health_timeout': health_settings['timeout'], 'healthy_threshold': health_settings['threshold'], 'idle_timeout': json.dumps(idle_timeout), 'isInternal': is_internal, 'listeners': json.dumps(listeners), 'region_zones': json.dumps(region_subnets[region]), 'region': region, 'security_groups': json.dumps(security_groups), 'subnet_type': elb_subnet_purpose, 
'unhealthy_threshold': health_settings['unhealthy_threshold'], 'vpc_id': get_vpc_id(env, region), } rendered_template = get_template(template_file='infrastructure/elb_data.json.j2', **template_kwargs) return rendered_template
[ "def", "make_elb_json", "(", "self", ")", ":", "env", "=", "self", ".", "env", "region", "=", "self", ".", "region", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "LOG", ".", "debug", "(", "'Block ELB Settings:\\n%s'", ",", "pformat", "(", "elb_settings", ")", ")", "health_settings", "=", "elb_settings", "[", "'health'", "]", "elb_subnet_purpose", "=", "elb_settings", ".", "get", "(", "'subnet_purpose'", ",", "'internal'", ")", "region_subnets", "=", "get_subnets", "(", "target", "=", "'elb'", ",", "purpose", "=", "elb_subnet_purpose", ",", "env", "=", "env", ",", "region", "=", "region", ")", "region_subnets", ".", "pop", "(", "\"subnet_ids\"", ",", "None", ")", "# CAVEAT: Setting the ELB to public, you must use a public subnet,", "# otherwise AWS complains about missing IGW on subnet.", "if", "elb_subnet_purpose", "==", "'internal'", ":", "is_internal", "=", "'true'", "else", ":", "is_internal", "=", "'false'", "target", "=", "elb_settings", ".", "get", "(", "'target'", ",", "'HTTP:80/health'", ")", "health", "=", "splay_health", "(", "target", ")", "listeners", "=", "format_listeners", "(", "elb_settings", "=", "elb_settings", ",", "env", "=", "self", ".", "env", ",", "region", "=", "region", ")", "idle_timeout", "=", "elb_settings", ".", "get", "(", "'idle_timeout'", ",", "None", ")", "access_log", "=", "elb_settings", ".", "get", "(", "'access_log'", ",", "{", "}", ")", "connection_draining_timeout", "=", "elb_settings", ".", "get", "(", "'connection_draining_timeout'", ",", "None", ")", "security_groups", "=", "DEFAULT_ELB_SECURITYGROUPS", "[", "env", "]", "security_groups", ".", "append", "(", "self", ".", "app", ")", "security_groups", ".", "extend", "(", "self", ".", "properties", "[", "'security_group'", "]", "[", "'elb_extras'", "]", ")", "security_groups", "=", "remove_duplicate_sg", "(", "security_groups", ")", "template_kwargs", "=", "{", "'access_log'", ":", "json", ".", "dumps", "(", "access_log", ")", ",", "'app_name'", ":", 
"self", ".", "app", ",", "'availability_zones'", ":", "json", ".", "dumps", "(", "region_subnets", ")", ",", "'connection_draining_timeout'", ":", "json", ".", "dumps", "(", "connection_draining_timeout", ")", ",", "'env'", ":", "env", ",", "'hc_string'", ":", "target", ",", "'health_interval'", ":", "health_settings", "[", "'interval'", "]", ",", "'health_path'", ":", "health", ".", "path", ",", "'health_port'", ":", "health", ".", "port", ",", "'health_protocol'", ":", "health", ".", "proto", ",", "'health_timeout'", ":", "health_settings", "[", "'timeout'", "]", ",", "'healthy_threshold'", ":", "health_settings", "[", "'threshold'", "]", ",", "'idle_timeout'", ":", "json", ".", "dumps", "(", "idle_timeout", ")", ",", "'isInternal'", ":", "is_internal", ",", "'listeners'", ":", "json", ".", "dumps", "(", "listeners", ")", ",", "'region_zones'", ":", "json", ".", "dumps", "(", "region_subnets", "[", "region", "]", ")", ",", "'region'", ":", "region", ",", "'security_groups'", ":", "json", ".", "dumps", "(", "security_groups", ")", ",", "'subnet_type'", ":", "elb_subnet_purpose", ",", "'unhealthy_threshold'", ":", "health_settings", "[", "'unhealthy_threshold'", "]", ",", "'vpc_id'", ":", "get_vpc_id", "(", "env", ",", "region", ")", ",", "}", "rendered_template", "=", "get_template", "(", "template_file", "=", "'infrastructure/elb_data.json.j2'", ",", "*", "*", "template_kwargs", ")", "return", "rendered_template" ]
Render the JSON template with arguments. Returns: str: Rendered ELB template.
[ "Render", "the", "JSON", "template", "with", "arguments", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L47-L112
train
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.create_elb
def create_elb(self): """Create or Update the ELB after rendering JSON data from configs. Asserts that the ELB task was successful. """ json_data = self.make_elb_json() LOG.debug('Block ELB JSON Data:\n%s', pformat(json_data)) wait_for_task(json_data) self.add_listener_policy(json_data) self.add_backend_policy(json_data) self.configure_attributes(json_data)
python
def create_elb(self): """Create or Update the ELB after rendering JSON data from configs. Asserts that the ELB task was successful. """ json_data = self.make_elb_json() LOG.debug('Block ELB JSON Data:\n%s', pformat(json_data)) wait_for_task(json_data) self.add_listener_policy(json_data) self.add_backend_policy(json_data) self.configure_attributes(json_data)
[ "def", "create_elb", "(", "self", ")", ":", "json_data", "=", "self", ".", "make_elb_json", "(", ")", "LOG", ".", "debug", "(", "'Block ELB JSON Data:\\n%s'", ",", "pformat", "(", "json_data", ")", ")", "wait_for_task", "(", "json_data", ")", "self", ".", "add_listener_policy", "(", "json_data", ")", "self", ".", "add_backend_policy", "(", "json_data", ")", "self", ".", "configure_attributes", "(", "json_data", ")" ]
Create or Update the ELB after rendering JSON data from configs. Asserts that the ELB task was successful.
[ "Create", "or", "Update", "the", "ELB", "after", "rendering", "JSON", "data", "from", "configs", ".", "Asserts", "that", "the", "ELB", "task", "was", "successful", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L114-L127
train
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.add_listener_policy
def add_listener_policy(self, json_data): """Attaches listerner policies to an ELB Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') # create stickiness policy if set in configs stickiness = {} elb_settings = self.properties['elb'] if elb_settings.get('ports'): ports = elb_settings['ports'] for listener in ports: if listener.get("stickiness"): stickiness = self.add_stickiness() LOG.info('Stickiness Found: %s', stickiness) break # Attach policies to created ELB for job in json.loads(json_data)['job']: for listener in job['listeners']: policies = [] ext_port = listener['externalPort'] if listener['listenerPolicies']: policies.extend(listener['listenerPolicies']) if stickiness.get(ext_port): policies.append(stickiness.get(ext_port)) if policies: LOG.info('Adding listener policies: %s', policies) elbclient.set_load_balancer_policies_of_listener( LoadBalancerName=self.app, LoadBalancerPort=ext_port, PolicyNames=policies)
python
def add_listener_policy(self, json_data): """Attaches listerner policies to an ELB Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') # create stickiness policy if set in configs stickiness = {} elb_settings = self.properties['elb'] if elb_settings.get('ports'): ports = elb_settings['ports'] for listener in ports: if listener.get("stickiness"): stickiness = self.add_stickiness() LOG.info('Stickiness Found: %s', stickiness) break # Attach policies to created ELB for job in json.loads(json_data)['job']: for listener in job['listeners']: policies = [] ext_port = listener['externalPort'] if listener['listenerPolicies']: policies.extend(listener['listenerPolicies']) if stickiness.get(ext_port): policies.append(stickiness.get(ext_port)) if policies: LOG.info('Adding listener policies: %s', policies) elbclient.set_load_balancer_policies_of_listener( LoadBalancerName=self.app, LoadBalancerPort=ext_port, PolicyNames=policies)
[ "def", "add_listener_policy", "(", "self", ",", "json_data", ")", ":", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "# create stickiness policy if set in configs", "stickiness", "=", "{", "}", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "if", "elb_settings", ".", "get", "(", "'ports'", ")", ":", "ports", "=", "elb_settings", "[", "'ports'", "]", "for", "listener", "in", "ports", ":", "if", "listener", ".", "get", "(", "\"stickiness\"", ")", ":", "stickiness", "=", "self", ".", "add_stickiness", "(", ")", "LOG", ".", "info", "(", "'Stickiness Found: %s'", ",", "stickiness", ")", "break", "# Attach policies to created ELB", "for", "job", "in", "json", ".", "loads", "(", "json_data", ")", "[", "'job'", "]", ":", "for", "listener", "in", "job", "[", "'listeners'", "]", ":", "policies", "=", "[", "]", "ext_port", "=", "listener", "[", "'externalPort'", "]", "if", "listener", "[", "'listenerPolicies'", "]", ":", "policies", ".", "extend", "(", "listener", "[", "'listenerPolicies'", "]", ")", "if", "stickiness", ".", "get", "(", "ext_port", ")", ":", "policies", ".", "append", "(", "stickiness", ".", "get", "(", "ext_port", ")", ")", "if", "policies", ":", "LOG", ".", "info", "(", "'Adding listener policies: %s'", ",", "policies", ")", "elbclient", ".", "set_load_balancer_policies_of_listener", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "LoadBalancerPort", "=", "ext_port", ",", "PolicyNames", "=", "policies", ")" ]
Attaches listerner policies to an ELB Args: json_data (json): return data from ELB upsert
[ "Attaches", "listerner", "policies", "to", "an", "ELB" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L129-L161
train
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.add_backend_policy
def add_backend_policy(self, json_data): """Attaches backend server policies to an ELB Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') # Attach backend server policies to created ELB for job in json.loads(json_data)['job']: for listener in job['listeners']: instance_port = listener['internalPort'] backend_policy_list = listener['backendPolicies'] if backend_policy_list: LOG.info('Adding backend server policies: %s', backend_policy_list) elbclient.set_load_balancer_policies_for_backend_server( LoadBalancerName=self.app, InstancePort=instance_port, PolicyNames=backend_policy_list)
python
def add_backend_policy(self, json_data): """Attaches backend server policies to an ELB Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') # Attach backend server policies to created ELB for job in json.loads(json_data)['job']: for listener in job['listeners']: instance_port = listener['internalPort'] backend_policy_list = listener['backendPolicies'] if backend_policy_list: LOG.info('Adding backend server policies: %s', backend_policy_list) elbclient.set_load_balancer_policies_for_backend_server( LoadBalancerName=self.app, InstancePort=instance_port, PolicyNames=backend_policy_list)
[ "def", "add_backend_policy", "(", "self", ",", "json_data", ")", ":", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "# Attach backend server policies to created ELB", "for", "job", "in", "json", ".", "loads", "(", "json_data", ")", "[", "'job'", "]", ":", "for", "listener", "in", "job", "[", "'listeners'", "]", ":", "instance_port", "=", "listener", "[", "'internalPort'", "]", "backend_policy_list", "=", "listener", "[", "'backendPolicies'", "]", "if", "backend_policy_list", ":", "LOG", ".", "info", "(", "'Adding backend server policies: %s'", ",", "backend_policy_list", ")", "elbclient", ".", "set_load_balancer_policies_for_backend_server", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "InstancePort", "=", "instance_port", ",", "PolicyNames", "=", "backend_policy_list", ")" ]
Attaches backend server policies to an ELB Args: json_data (json): return data from ELB upsert
[ "Attaches", "backend", "server", "policies", "to", "an", "ELB" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L163-L180
train
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.add_stickiness
def add_stickiness(self): """ Adds stickiness policy to created ELB Returns: dict: A dict of stickiness policies and ports:: example: { 80: "$policy_name" } """ stickiness_dict = {} env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') elb_settings = self.properties['elb'] for listener in elb_settings.get('ports'): if listener.get("stickiness"): sticky_type = listener['stickiness']['type'].lower() externalport = int(listener['loadbalancer'].split(":")[-1]) policyname_tmp = "{0}-{1}-{2}-{3}" if sticky_type == 'app': cookiename = listener['stickiness']['cookie_name'] policy_key = cookiename.replace('.', '') policyname = policyname_tmp.format(self.app, sticky_type, externalport, policy_key) elbclient.create_app_cookie_stickiness_policy( LoadBalancerName=self.app, PolicyName=policyname, CookieName=cookiename) stickiness_dict[externalport] = policyname elif sticky_type == 'elb': cookie_ttl = listener['stickiness'].get('cookie_ttl', None) policyname = policyname_tmp.format(self.app, sticky_type, externalport, cookie_ttl) if cookie_ttl: elbclient.create_lb_cookie_stickiness_policy( LoadBalancerName=self.app, PolicyName=policyname, CookieExpirationPeriod=cookie_ttl) else: elbclient.create_lb_cookie_stickiness_policy(LoadBalancerName=self.app, PolicyName=policyname) stickiness_dict[externalport] = policyname return stickiness_dict
python
def add_stickiness(self): """ Adds stickiness policy to created ELB Returns: dict: A dict of stickiness policies and ports:: example: { 80: "$policy_name" } """ stickiness_dict = {} env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') elb_settings = self.properties['elb'] for listener in elb_settings.get('ports'): if listener.get("stickiness"): sticky_type = listener['stickiness']['type'].lower() externalport = int(listener['loadbalancer'].split(":")[-1]) policyname_tmp = "{0}-{1}-{2}-{3}" if sticky_type == 'app': cookiename = listener['stickiness']['cookie_name'] policy_key = cookiename.replace('.', '') policyname = policyname_tmp.format(self.app, sticky_type, externalport, policy_key) elbclient.create_app_cookie_stickiness_policy( LoadBalancerName=self.app, PolicyName=policyname, CookieName=cookiename) stickiness_dict[externalport] = policyname elif sticky_type == 'elb': cookie_ttl = listener['stickiness'].get('cookie_ttl', None) policyname = policyname_tmp.format(self.app, sticky_type, externalport, cookie_ttl) if cookie_ttl: elbclient.create_lb_cookie_stickiness_policy( LoadBalancerName=self.app, PolicyName=policyname, CookieExpirationPeriod=cookie_ttl) else: elbclient.create_lb_cookie_stickiness_policy(LoadBalancerName=self.app, PolicyName=policyname) stickiness_dict[externalport] = policyname return stickiness_dict
[ "def", "add_stickiness", "(", "self", ")", ":", "stickiness_dict", "=", "{", "}", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "for", "listener", "in", "elb_settings", ".", "get", "(", "'ports'", ")", ":", "if", "listener", ".", "get", "(", "\"stickiness\"", ")", ":", "sticky_type", "=", "listener", "[", "'stickiness'", "]", "[", "'type'", "]", ".", "lower", "(", ")", "externalport", "=", "int", "(", "listener", "[", "'loadbalancer'", "]", ".", "split", "(", "\":\"", ")", "[", "-", "1", "]", ")", "policyname_tmp", "=", "\"{0}-{1}-{2}-{3}\"", "if", "sticky_type", "==", "'app'", ":", "cookiename", "=", "listener", "[", "'stickiness'", "]", "[", "'cookie_name'", "]", "policy_key", "=", "cookiename", ".", "replace", "(", "'.'", ",", "''", ")", "policyname", "=", "policyname_tmp", ".", "format", "(", "self", ".", "app", ",", "sticky_type", ",", "externalport", ",", "policy_key", ")", "elbclient", ".", "create_app_cookie_stickiness_policy", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "PolicyName", "=", "policyname", ",", "CookieName", "=", "cookiename", ")", "stickiness_dict", "[", "externalport", "]", "=", "policyname", "elif", "sticky_type", "==", "'elb'", ":", "cookie_ttl", "=", "listener", "[", "'stickiness'", "]", ".", "get", "(", "'cookie_ttl'", ",", "None", ")", "policyname", "=", "policyname_tmp", ".", "format", "(", "self", ".", "app", ",", "sticky_type", ",", "externalport", ",", "cookie_ttl", ")", "if", "cookie_ttl", ":", "elbclient", ".", "create_lb_cookie_stickiness_policy", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "PolicyName", "=", "policyname", ",", "CookieExpirationPeriod", "=", "cookie_ttl", ")", "else", ":", "elbclient", ".", "create_lb_cookie_stickiness_policy", "(", "LoadBalancerName", "=", 
"self", ".", "app", ",", "PolicyName", "=", "policyname", ")", "stickiness_dict", "[", "externalport", "]", "=", "policyname", "return", "stickiness_dict" ]
Adds stickiness policy to created ELB Returns: dict: A dict of stickiness policies and ports:: example: { 80: "$policy_name" }
[ "Adds", "stickiness", "policy", "to", "created", "ELB" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L182-L218
train
foremast/foremast
src/foremast/elb/create_elb.py
SpinnakerELB.configure_attributes
def configure_attributes(self, json_data): """Configure load balancer attributes such as idle timeout, connection draining, etc Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') elb_settings = self.properties['elb'] LOG.debug('Block ELB Settings Pre Configure Load Balancer Attributes:\n%s', pformat(elb_settings)) # FIXME: Determine why 'job' is not being used # pylint: disable=unused-variable for job in json.loads(json_data)['job']: load_balancer_attributes = { 'CrossZoneLoadBalancing': { 'Enabled': True }, 'AccessLog': { 'Enabled': False, }, 'ConnectionDraining': { 'Enabled': False, }, 'ConnectionSettings': { 'IdleTimeout': 60 } } if elb_settings.get('connection_draining_timeout'): connection_draining_timeout = int(elb_settings['connection_draining_timeout']) LOG.info('Applying Custom Load Balancer Connection Draining Timeout: %d', connection_draining_timeout) load_balancer_attributes['ConnectionDraining'] = { 'Enabled': True, 'Timeout': connection_draining_timeout } if elb_settings.get('idle_timeout'): idle_timeout = int(elb_settings['idle_timeout']) LOG.info('Applying Custom Load Balancer Idle Timeout: %d', idle_timeout) load_balancer_attributes['ConnectionSettings'] = {'IdleTimeout': idle_timeout} if elb_settings.get('access_log'): access_log_bucket_name = elb_settings['access_log']['bucket_name'] access_log_bucket_prefix = elb_settings['access_log']['bucket_prefix'] access_log_emit_interval = int(elb_settings['access_log']['emit_interval']) LOG.info('Applying Custom Load Balancer Access Log: %s/%s every %d minutes', access_log_bucket_name, access_log_bucket_prefix, access_log_emit_interval) load_balancer_attributes['AccessLog'] = { 'Enabled': True, 'S3BucketName': access_log_bucket_name, 'EmitInterval': access_log_emit_interval, 'S3BucketPrefix': access_log_bucket_prefix } LOG.info('Applying Load Balancer Attributes') LOG.debug('Load Balancer 
Attributes:\n%s', pformat(load_balancer_attributes)) elbclient.modify_load_balancer_attributes( LoadBalancerName=self.app, LoadBalancerAttributes=load_balancer_attributes)
python
def configure_attributes(self, json_data): """Configure load balancer attributes such as idle timeout, connection draining, etc Args: json_data (json): return data from ELB upsert """ env = boto3.session.Session(profile_name=self.env, region_name=self.region) elbclient = env.client('elb') elb_settings = self.properties['elb'] LOG.debug('Block ELB Settings Pre Configure Load Balancer Attributes:\n%s', pformat(elb_settings)) # FIXME: Determine why 'job' is not being used # pylint: disable=unused-variable for job in json.loads(json_data)['job']: load_balancer_attributes = { 'CrossZoneLoadBalancing': { 'Enabled': True }, 'AccessLog': { 'Enabled': False, }, 'ConnectionDraining': { 'Enabled': False, }, 'ConnectionSettings': { 'IdleTimeout': 60 } } if elb_settings.get('connection_draining_timeout'): connection_draining_timeout = int(elb_settings['connection_draining_timeout']) LOG.info('Applying Custom Load Balancer Connection Draining Timeout: %d', connection_draining_timeout) load_balancer_attributes['ConnectionDraining'] = { 'Enabled': True, 'Timeout': connection_draining_timeout } if elb_settings.get('idle_timeout'): idle_timeout = int(elb_settings['idle_timeout']) LOG.info('Applying Custom Load Balancer Idle Timeout: %d', idle_timeout) load_balancer_attributes['ConnectionSettings'] = {'IdleTimeout': idle_timeout} if elb_settings.get('access_log'): access_log_bucket_name = elb_settings['access_log']['bucket_name'] access_log_bucket_prefix = elb_settings['access_log']['bucket_prefix'] access_log_emit_interval = int(elb_settings['access_log']['emit_interval']) LOG.info('Applying Custom Load Balancer Access Log: %s/%s every %d minutes', access_log_bucket_name, access_log_bucket_prefix, access_log_emit_interval) load_balancer_attributes['AccessLog'] = { 'Enabled': True, 'S3BucketName': access_log_bucket_name, 'EmitInterval': access_log_emit_interval, 'S3BucketPrefix': access_log_bucket_prefix } LOG.info('Applying Load Balancer Attributes') LOG.debug('Load Balancer 
Attributes:\n%s', pformat(load_balancer_attributes)) elbclient.modify_load_balancer_attributes( LoadBalancerName=self.app, LoadBalancerAttributes=load_balancer_attributes)
[ "def", "configure_attributes", "(", "self", ",", "json_data", ")", ":", "env", "=", "boto3", ".", "session", ".", "Session", "(", "profile_name", "=", "self", ".", "env", ",", "region_name", "=", "self", ".", "region", ")", "elbclient", "=", "env", ".", "client", "(", "'elb'", ")", "elb_settings", "=", "self", ".", "properties", "[", "'elb'", "]", "LOG", ".", "debug", "(", "'Block ELB Settings Pre Configure Load Balancer Attributes:\\n%s'", ",", "pformat", "(", "elb_settings", ")", ")", "# FIXME: Determine why 'job' is not being used", "# pylint: disable=unused-variable", "for", "job", "in", "json", ".", "loads", "(", "json_data", ")", "[", "'job'", "]", ":", "load_balancer_attributes", "=", "{", "'CrossZoneLoadBalancing'", ":", "{", "'Enabled'", ":", "True", "}", ",", "'AccessLog'", ":", "{", "'Enabled'", ":", "False", ",", "}", ",", "'ConnectionDraining'", ":", "{", "'Enabled'", ":", "False", ",", "}", ",", "'ConnectionSettings'", ":", "{", "'IdleTimeout'", ":", "60", "}", "}", "if", "elb_settings", ".", "get", "(", "'connection_draining_timeout'", ")", ":", "connection_draining_timeout", "=", "int", "(", "elb_settings", "[", "'connection_draining_timeout'", "]", ")", "LOG", ".", "info", "(", "'Applying Custom Load Balancer Connection Draining Timeout: %d'", ",", "connection_draining_timeout", ")", "load_balancer_attributes", "[", "'ConnectionDraining'", "]", "=", "{", "'Enabled'", ":", "True", ",", "'Timeout'", ":", "connection_draining_timeout", "}", "if", "elb_settings", ".", "get", "(", "'idle_timeout'", ")", ":", "idle_timeout", "=", "int", "(", "elb_settings", "[", "'idle_timeout'", "]", ")", "LOG", ".", "info", "(", "'Applying Custom Load Balancer Idle Timeout: %d'", ",", "idle_timeout", ")", "load_balancer_attributes", "[", "'ConnectionSettings'", "]", "=", "{", "'IdleTimeout'", ":", "idle_timeout", "}", "if", "elb_settings", ".", "get", "(", "'access_log'", ")", ":", "access_log_bucket_name", "=", "elb_settings", "[", "'access_log'", "]", "[", 
"'bucket_name'", "]", "access_log_bucket_prefix", "=", "elb_settings", "[", "'access_log'", "]", "[", "'bucket_prefix'", "]", "access_log_emit_interval", "=", "int", "(", "elb_settings", "[", "'access_log'", "]", "[", "'emit_interval'", "]", ")", "LOG", ".", "info", "(", "'Applying Custom Load Balancer Access Log: %s/%s every %d minutes'", ",", "access_log_bucket_name", ",", "access_log_bucket_prefix", ",", "access_log_emit_interval", ")", "load_balancer_attributes", "[", "'AccessLog'", "]", "=", "{", "'Enabled'", ":", "True", ",", "'S3BucketName'", ":", "access_log_bucket_name", ",", "'EmitInterval'", ":", "access_log_emit_interval", ",", "'S3BucketPrefix'", ":", "access_log_bucket_prefix", "}", "LOG", ".", "info", "(", "'Applying Load Balancer Attributes'", ")", "LOG", ".", "debug", "(", "'Load Balancer Attributes:\\n%s'", ",", "pformat", "(", "load_balancer_attributes", ")", ")", "elbclient", ".", "modify_load_balancer_attributes", "(", "LoadBalancerName", "=", "self", ".", "app", ",", "LoadBalancerAttributes", "=", "load_balancer_attributes", ")" ]
Configure load balancer attributes such as idle timeout, connection draining, etc Args: json_data (json): return data from ELB upsert
[ "Configure", "load", "balancer", "attributes", "such", "as", "idle", "timeout", "connection", "draining", "etc" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/elb/create_elb.py#L220-L276
train
foremast/foremast
src/foremast/configs/__main__.py
main
def main(): """Append Application Configurations to a given file in multiple formats.""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) parser.add_argument('-o', '--output', required=True, help='Name of environment file to append to') parser.add_argument( '-g', '--git-short', metavar='GROUP/PROJECT', required=True, help='Short name for Git, e.g. forrest/core') parser.add_argument('-r', '--runway-dir', help='Runway directory with app.json files, requires --git-short') args = parser.parse_args() LOG.setLevel(args.debug) logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) generated = gogoutils.Generator(*gogoutils.Parser(args.git_short).parse_url(), formats=APP_FORMATS) git_short = generated.gitlab()['main'] if args.runway_dir: configs = process_runway_configs(runway_dir=args.runway_dir) else: configs = process_git_configs(git_short=git_short) write_variables(app_configs=configs, out_file=args.output, git_short=git_short)
python
def main(): """Append Application Configurations to a given file in multiple formats.""" logging.basicConfig(format=LOGGING_FORMAT) parser = argparse.ArgumentParser(description=main.__doc__) add_debug(parser) parser.add_argument('-o', '--output', required=True, help='Name of environment file to append to') parser.add_argument( '-g', '--git-short', metavar='GROUP/PROJECT', required=True, help='Short name for Git, e.g. forrest/core') parser.add_argument('-r', '--runway-dir', help='Runway directory with app.json files, requires --git-short') args = parser.parse_args() LOG.setLevel(args.debug) logging.getLogger(__package__.split('.')[0]).setLevel(args.debug) generated = gogoutils.Generator(*gogoutils.Parser(args.git_short).parse_url(), formats=APP_FORMATS) git_short = generated.gitlab()['main'] if args.runway_dir: configs = process_runway_configs(runway_dir=args.runway_dir) else: configs = process_git_configs(git_short=git_short) write_variables(app_configs=configs, out_file=args.output, git_short=git_short)
[ "def", "main", "(", ")", ":", "logging", ".", "basicConfig", "(", "format", "=", "LOGGING_FORMAT", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "main", ".", "__doc__", ")", "add_debug", "(", "parser", ")", "parser", ".", "add_argument", "(", "'-o'", ",", "'--output'", ",", "required", "=", "True", ",", "help", "=", "'Name of environment file to append to'", ")", "parser", ".", "add_argument", "(", "'-g'", ",", "'--git-short'", ",", "metavar", "=", "'GROUP/PROJECT'", ",", "required", "=", "True", ",", "help", "=", "'Short name for Git, e.g. forrest/core'", ")", "parser", ".", "add_argument", "(", "'-r'", ",", "'--runway-dir'", ",", "help", "=", "'Runway directory with app.json files, requires --git-short'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "LOG", ".", "setLevel", "(", "args", ".", "debug", ")", "logging", ".", "getLogger", "(", "__package__", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ".", "setLevel", "(", "args", ".", "debug", ")", "generated", "=", "gogoutils", ".", "Generator", "(", "*", "gogoutils", ".", "Parser", "(", "args", ".", "git_short", ")", ".", "parse_url", "(", ")", ",", "formats", "=", "APP_FORMATS", ")", "git_short", "=", "generated", ".", "gitlab", "(", ")", "[", "'main'", "]", "if", "args", ".", "runway_dir", ":", "configs", "=", "process_runway_configs", "(", "runway_dir", "=", "args", ".", "runway_dir", ")", "else", ":", "configs", "=", "process_git_configs", "(", "git_short", "=", "git_short", ")", "write_variables", "(", "app_configs", "=", "configs", ",", "out_file", "=", "args", ".", "output", ",", "git_short", "=", "git_short", ")" ]
Append Application Configurations to a given file in multiple formats.
[ "Append", "Application", "Configurations", "to", "a", "given", "file", "in", "multiple", "formats", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/configs/__main__.py#L33-L56
train
foremast/foremast
src/foremast/__main__.py
add_infra
def add_infra(subparsers): """Infrastructure subcommands.""" infra_parser = subparsers.add_parser('infra', help=runner.prepare_infrastructure.__doc__) infra_parser.set_defaults(func=runner.prepare_infrastructure)
python
def add_infra(subparsers): """Infrastructure subcommands.""" infra_parser = subparsers.add_parser('infra', help=runner.prepare_infrastructure.__doc__) infra_parser.set_defaults(func=runner.prepare_infrastructure)
[ "def", "add_infra", "(", "subparsers", ")", ":", "infra_parser", "=", "subparsers", ".", "add_parser", "(", "'infra'", ",", "help", "=", "runner", ".", "prepare_infrastructure", ".", "__doc__", ")", "infra_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "prepare_infrastructure", ")" ]
Infrastructure subcommands.
[ "Infrastructure", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L15-L18
train
foremast/foremast
src/foremast/__main__.py
add_pipeline
def add_pipeline(subparsers): """Pipeline subcommands.""" pipeline_parser = subparsers.add_parser( 'pipeline', help=add_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_parser.set_defaults(func=pipeline_parser.print_help) pipeline_subparsers = pipeline_parser.add_subparsers(title='Pipelines') pipeline_full_parser = pipeline_subparsers.add_parser( 'app', help=runner.prepare_app_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_full_parser.set_defaults(func=runner.prepare_app_pipeline) pipeline_onetime_parser = pipeline_subparsers.add_parser( 'onetime', help=runner.prepare_onetime_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_onetime_parser.set_defaults(func=runner.prepare_onetime_pipeline) add_env(pipeline_onetime_parser)
python
def add_pipeline(subparsers): """Pipeline subcommands.""" pipeline_parser = subparsers.add_parser( 'pipeline', help=add_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_parser.set_defaults(func=pipeline_parser.print_help) pipeline_subparsers = pipeline_parser.add_subparsers(title='Pipelines') pipeline_full_parser = pipeline_subparsers.add_parser( 'app', help=runner.prepare_app_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_full_parser.set_defaults(func=runner.prepare_app_pipeline) pipeline_onetime_parser = pipeline_subparsers.add_parser( 'onetime', help=runner.prepare_onetime_pipeline.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) pipeline_onetime_parser.set_defaults(func=runner.prepare_onetime_pipeline) add_env(pipeline_onetime_parser)
[ "def", "add_pipeline", "(", "subparsers", ")", ":", "pipeline_parser", "=", "subparsers", ".", "add_parser", "(", "'pipeline'", ",", "help", "=", "add_pipeline", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "pipeline_parser", ".", "set_defaults", "(", "func", "=", "pipeline_parser", ".", "print_help", ")", "pipeline_subparsers", "=", "pipeline_parser", ".", "add_subparsers", "(", "title", "=", "'Pipelines'", ")", "pipeline_full_parser", "=", "pipeline_subparsers", ".", "add_parser", "(", "'app'", ",", "help", "=", "runner", ".", "prepare_app_pipeline", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "pipeline_full_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "prepare_app_pipeline", ")", "pipeline_onetime_parser", "=", "pipeline_subparsers", ".", "add_parser", "(", "'onetime'", ",", "help", "=", "runner", ".", "prepare_onetime_pipeline", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "pipeline_onetime_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "prepare_onetime_pipeline", ")", "add_env", "(", "pipeline_onetime_parser", ")" ]
Pipeline subcommands.
[ "Pipeline", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L21-L36
train
foremast/foremast
src/foremast/__main__.py
add_rebuild
def add_rebuild(subparsers): """Rebuild Pipeline subcommands.""" rebuild_parser = subparsers.add_parser( 'rebuild', help=runner.rebuild_pipelines.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) rebuild_parser.set_defaults(func=runner.rebuild_pipelines) rebuild_parser.add_argument('-a', '--all', action='store_true', help='Rebuild all Pipelines') rebuild_parser.add_argument( 'project', nargs='?', default=os.getenv('REBUILD_PROJECT'), help='Project to rebuild, overrides $REBUILD_PROJECT')
python
def add_rebuild(subparsers): """Rebuild Pipeline subcommands.""" rebuild_parser = subparsers.add_parser( 'rebuild', help=runner.rebuild_pipelines.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) rebuild_parser.set_defaults(func=runner.rebuild_pipelines) rebuild_parser.add_argument('-a', '--all', action='store_true', help='Rebuild all Pipelines') rebuild_parser.add_argument( 'project', nargs='?', default=os.getenv('REBUILD_PROJECT'), help='Project to rebuild, overrides $REBUILD_PROJECT')
[ "def", "add_rebuild", "(", "subparsers", ")", ":", "rebuild_parser", "=", "subparsers", ".", "add_parser", "(", "'rebuild'", ",", "help", "=", "runner", ".", "rebuild_pipelines", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "rebuild_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "rebuild_pipelines", ")", "rebuild_parser", ".", "add_argument", "(", "'-a'", ",", "'--all'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Rebuild all Pipelines'", ")", "rebuild_parser", ".", "add_argument", "(", "'project'", ",", "nargs", "=", "'?'", ",", "default", "=", "os", ".", "getenv", "(", "'REBUILD_PROJECT'", ")", ",", "help", "=", "'Project to rebuild, overrides $REBUILD_PROJECT'", ")" ]
Rebuild Pipeline subcommands.
[ "Rebuild", "Pipeline", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L39-L49
train
foremast/foremast
src/foremast/__main__.py
add_autoscaling
def add_autoscaling(subparsers): """Auto Scaling Group Policy subcommands.""" autoscaling_parser = subparsers.add_parser( 'autoscaling', help=runner.create_scaling_policy.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) autoscaling_parser.set_defaults(func=runner.create_scaling_policy)
python
def add_autoscaling(subparsers): """Auto Scaling Group Policy subcommands.""" autoscaling_parser = subparsers.add_parser( 'autoscaling', help=runner.create_scaling_policy.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) autoscaling_parser.set_defaults(func=runner.create_scaling_policy)
[ "def", "add_autoscaling", "(", "subparsers", ")", ":", "autoscaling_parser", "=", "subparsers", ".", "add_parser", "(", "'autoscaling'", ",", "help", "=", "runner", ".", "create_scaling_policy", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "autoscaling_parser", ".", "set_defaults", "(", "func", "=", "runner", ".", "create_scaling_policy", ")" ]
Auto Scaling Group Policy subcommands.
[ "Auto", "Scaling", "Group", "Policy", "subcommands", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L52-L58
train
foremast/foremast
src/foremast/__main__.py
add_validate
def add_validate(subparsers): """Validate Spinnaker setup.""" validate_parser = subparsers.add_parser( 'validate', help=add_validate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_parser.set_defaults(func=validate_parser.print_help) validate_subparsers = validate_parser.add_subparsers(title='Testers') validate_all_parser = validate_subparsers.add_parser( 'all', help=validate.validate_all.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_all_parser.set_defaults(func=validate.validate_all) validate_gate_parser = validate_subparsers.add_parser( 'gate', help=validate.validate_gate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_gate_parser.set_defaults(func=validate.validate_gate)
python
def add_validate(subparsers): """Validate Spinnaker setup.""" validate_parser = subparsers.add_parser( 'validate', help=add_validate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_parser.set_defaults(func=validate_parser.print_help) validate_subparsers = validate_parser.add_subparsers(title='Testers') validate_all_parser = validate_subparsers.add_parser( 'all', help=validate.validate_all.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_all_parser.set_defaults(func=validate.validate_all) validate_gate_parser = validate_subparsers.add_parser( 'gate', help=validate.validate_gate.__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter) validate_gate_parser.set_defaults(func=validate.validate_gate)
[ "def", "add_validate", "(", "subparsers", ")", ":", "validate_parser", "=", "subparsers", ".", "add_parser", "(", "'validate'", ",", "help", "=", "add_validate", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "validate_parser", ".", "set_defaults", "(", "func", "=", "validate_parser", ".", "print_help", ")", "validate_subparsers", "=", "validate_parser", ".", "add_subparsers", "(", "title", "=", "'Testers'", ")", "validate_all_parser", "=", "validate_subparsers", ".", "add_parser", "(", "'all'", ",", "help", "=", "validate", ".", "validate_all", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "validate_all_parser", ".", "set_defaults", "(", "func", "=", "validate", ".", "validate_all", ")", "validate_gate_parser", "=", "validate_subparsers", ".", "add_parser", "(", "'gate'", ",", "help", "=", "validate", ".", "validate_gate", ".", "__doc__", ",", "formatter_class", "=", "argparse", ".", "ArgumentDefaultsHelpFormatter", ")", "validate_gate_parser", ".", "set_defaults", "(", "func", "=", "validate", ".", "validate_gate", ")" ]
Validate Spinnaker setup.
[ "Validate", "Spinnaker", "setup", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/__main__.py#L61-L75
train
foremast/foremast
src/foremast/pipeline/create_pipeline.py
SpinnakerPipeline.get_existing_pipelines
def get_existing_pipelines(self): """Get existing pipeline configs for specific application. Returns: str: Pipeline config json """ url = "{0}/applications/{1}/pipelineConfigs".format(API_URL, self.app_name) resp = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert resp.ok, 'Failed to lookup pipelines for {0}: {1}'.format(self.app_name, resp.text) return resp.json()
python
def get_existing_pipelines(self): """Get existing pipeline configs for specific application. Returns: str: Pipeline config json """ url = "{0}/applications/{1}/pipelineConfigs".format(API_URL, self.app_name) resp = requests.get(url, verify=GATE_CA_BUNDLE, cert=GATE_CLIENT_CERT) assert resp.ok, 'Failed to lookup pipelines for {0}: {1}'.format(self.app_name, resp.text) return resp.json()
[ "def", "get_existing_pipelines", "(", "self", ")", ":", "url", "=", "\"{0}/applications/{1}/pipelineConfigs\"", ".", "format", "(", "API_URL", ",", "self", ".", "app_name", ")", "resp", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "GATE_CA_BUNDLE", ",", "cert", "=", "GATE_CLIENT_CERT", ")", "assert", "resp", ".", "ok", ",", "'Failed to lookup pipelines for {0}: {1}'", ".", "format", "(", "self", ".", "app_name", ",", "resp", ".", "text", ")", "return", "resp", ".", "json", "(", ")" ]
Get existing pipeline configs for specific application. Returns: str: Pipeline config json
[ "Get", "existing", "pipeline", "configs", "for", "specific", "application", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline.py#L148-L159
train
foremast/foremast
src/foremast/pipeline/create_pipeline.py
SpinnakerPipeline.compare_with_existing
def compare_with_existing(self, region='us-east-1', onetime=False): """Compare desired pipeline with existing pipelines. Args: region (str): Region of desired pipeline. onetime (bool): Looks for different pipeline if Onetime Returns: str: pipeline_id if existing, empty string of not. """ pipelines = self.get_existing_pipelines() pipeline_id = None found = False for pipeline in pipelines: correct_app_and_region = (pipeline['application'] == self.app_name) and (region in pipeline['name']) if onetime: onetime_str = "(onetime-{})".format(self.environments[0]) if correct_app_and_region and onetime_str in pipeline['name']: found = True elif correct_app_and_region: found = True if found: self.log.info('Existing pipeline found - %s', pipeline['name']) pipeline_id = pipeline['id'] break else: self.log.info('No existing pipeline found') return pipeline_id
python
def compare_with_existing(self, region='us-east-1', onetime=False): """Compare desired pipeline with existing pipelines. Args: region (str): Region of desired pipeline. onetime (bool): Looks for different pipeline if Onetime Returns: str: pipeline_id if existing, empty string of not. """ pipelines = self.get_existing_pipelines() pipeline_id = None found = False for pipeline in pipelines: correct_app_and_region = (pipeline['application'] == self.app_name) and (region in pipeline['name']) if onetime: onetime_str = "(onetime-{})".format(self.environments[0]) if correct_app_and_region and onetime_str in pipeline['name']: found = True elif correct_app_and_region: found = True if found: self.log.info('Existing pipeline found - %s', pipeline['name']) pipeline_id = pipeline['id'] break else: self.log.info('No existing pipeline found') return pipeline_id
[ "def", "compare_with_existing", "(", "self", ",", "region", "=", "'us-east-1'", ",", "onetime", "=", "False", ")", ":", "pipelines", "=", "self", ".", "get_existing_pipelines", "(", ")", "pipeline_id", "=", "None", "found", "=", "False", "for", "pipeline", "in", "pipelines", ":", "correct_app_and_region", "=", "(", "pipeline", "[", "'application'", "]", "==", "self", ".", "app_name", ")", "and", "(", "region", "in", "pipeline", "[", "'name'", "]", ")", "if", "onetime", ":", "onetime_str", "=", "\"(onetime-{})\"", ".", "format", "(", "self", ".", "environments", "[", "0", "]", ")", "if", "correct_app_and_region", "and", "onetime_str", "in", "pipeline", "[", "'name'", "]", ":", "found", "=", "True", "elif", "correct_app_and_region", ":", "found", "=", "True", "if", "found", ":", "self", ".", "log", ".", "info", "(", "'Existing pipeline found - %s'", ",", "pipeline", "[", "'name'", "]", ")", "pipeline_id", "=", "pipeline", "[", "'id'", "]", "break", "else", ":", "self", ".", "log", ".", "info", "(", "'No existing pipeline found'", ")", "return", "pipeline_id" ]
Compare desired pipeline with existing pipelines. Args: region (str): Region of desired pipeline. onetime (bool): Looks for different pipeline if Onetime Returns: str: pipeline_id if existing, empty string of not.
[ "Compare", "desired", "pipeline", "with", "existing", "pipelines", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline.py#L161-L191
train
foremast/foremast
src/foremast/pipeline/create_pipeline.py
SpinnakerPipeline.create_pipeline
def create_pipeline(self): """Main wrapper for pipeline creation. 1. Runs clean_pipelines to clean up existing ones 2. determines which environments the pipeline needs 3. gets all subnets for template rendering 4. Renders all of the pipeline blocks as defined in configs 5. Runs post_pipeline to create pipeline """ clean_pipelines(app=self.app_name, settings=self.settings) pipeline_envs = self.environments self.log.debug('Envs from pipeline.json: %s', pipeline_envs) regions_envs = collections.defaultdict(list) for env in pipeline_envs: for region in self.settings[env]['regions']: regions_envs[region].append(env) self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4)) subnets = None pipelines = {} for region, envs in regions_envs.items(): self.generated.data.update({ 'region': region, }) # TODO: Overrides for an environment no longer makes sense. Need to # provide override for entire Region possibly. pipelines[region] = self.render_wrapper(region=region) previous_env = None for env in envs: self.generated.data.update({ 'env': env, }) pipeline_block_data = { "env": env, "generated": self.generated, "previous_env": previous_env, "region": region, "settings": self.settings[env][region], "pipeline_data": self.settings['pipeline'], } if self.settings['pipeline']['type'] in EC2_PIPELINE_TYPES: if not subnets: subnets = get_subnets() try: region_subnets = {region: subnets[env][region]} except KeyError: self.log.info('%s is not available for %s.', env, region) continue pipeline_block_data['region_subnets'] = region_subnets block = construct_pipeline_block(**pipeline_block_data) pipelines[region]['stages'].extend(json.loads(block)) previous_env = env self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines)) for region, pipeline in pipelines.items(): renumerate_stages(pipeline) self.post_pipeline(pipeline) return True
python
def create_pipeline(self): """Main wrapper for pipeline creation. 1. Runs clean_pipelines to clean up existing ones 2. determines which environments the pipeline needs 3. gets all subnets for template rendering 4. Renders all of the pipeline blocks as defined in configs 5. Runs post_pipeline to create pipeline """ clean_pipelines(app=self.app_name, settings=self.settings) pipeline_envs = self.environments self.log.debug('Envs from pipeline.json: %s', pipeline_envs) regions_envs = collections.defaultdict(list) for env in pipeline_envs: for region in self.settings[env]['regions']: regions_envs[region].append(env) self.log.info('Environments and Regions for Pipelines:\n%s', json.dumps(regions_envs, indent=4)) subnets = None pipelines = {} for region, envs in regions_envs.items(): self.generated.data.update({ 'region': region, }) # TODO: Overrides for an environment no longer makes sense. Need to # provide override for entire Region possibly. pipelines[region] = self.render_wrapper(region=region) previous_env = None for env in envs: self.generated.data.update({ 'env': env, }) pipeline_block_data = { "env": env, "generated": self.generated, "previous_env": previous_env, "region": region, "settings": self.settings[env][region], "pipeline_data": self.settings['pipeline'], } if self.settings['pipeline']['type'] in EC2_PIPELINE_TYPES: if not subnets: subnets = get_subnets() try: region_subnets = {region: subnets[env][region]} except KeyError: self.log.info('%s is not available for %s.', env, region) continue pipeline_block_data['region_subnets'] = region_subnets block = construct_pipeline_block(**pipeline_block_data) pipelines[region]['stages'].extend(json.loads(block)) previous_env = env self.log.debug('Assembled Pipelines:\n%s', pformat(pipelines)) for region, pipeline in pipelines.items(): renumerate_stages(pipeline) self.post_pipeline(pipeline) return True
[ "def", "create_pipeline", "(", "self", ")", ":", "clean_pipelines", "(", "app", "=", "self", ".", "app_name", ",", "settings", "=", "self", ".", "settings", ")", "pipeline_envs", "=", "self", ".", "environments", "self", ".", "log", ".", "debug", "(", "'Envs from pipeline.json: %s'", ",", "pipeline_envs", ")", "regions_envs", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "env", "in", "pipeline_envs", ":", "for", "region", "in", "self", ".", "settings", "[", "env", "]", "[", "'regions'", "]", ":", "regions_envs", "[", "region", "]", ".", "append", "(", "env", ")", "self", ".", "log", ".", "info", "(", "'Environments and Regions for Pipelines:\\n%s'", ",", "json", ".", "dumps", "(", "regions_envs", ",", "indent", "=", "4", ")", ")", "subnets", "=", "None", "pipelines", "=", "{", "}", "for", "region", ",", "envs", "in", "regions_envs", ".", "items", "(", ")", ":", "self", ".", "generated", ".", "data", ".", "update", "(", "{", "'region'", ":", "region", ",", "}", ")", "# TODO: Overrides for an environment no longer makes sense. 
Need to", "# provide override for entire Region possibly.", "pipelines", "[", "region", "]", "=", "self", ".", "render_wrapper", "(", "region", "=", "region", ")", "previous_env", "=", "None", "for", "env", "in", "envs", ":", "self", ".", "generated", ".", "data", ".", "update", "(", "{", "'env'", ":", "env", ",", "}", ")", "pipeline_block_data", "=", "{", "\"env\"", ":", "env", ",", "\"generated\"", ":", "self", ".", "generated", ",", "\"previous_env\"", ":", "previous_env", ",", "\"region\"", ":", "region", ",", "\"settings\"", ":", "self", ".", "settings", "[", "env", "]", "[", "region", "]", ",", "\"pipeline_data\"", ":", "self", ".", "settings", "[", "'pipeline'", "]", ",", "}", "if", "self", ".", "settings", "[", "'pipeline'", "]", "[", "'type'", "]", "in", "EC2_PIPELINE_TYPES", ":", "if", "not", "subnets", ":", "subnets", "=", "get_subnets", "(", ")", "try", ":", "region_subnets", "=", "{", "region", ":", "subnets", "[", "env", "]", "[", "region", "]", "}", "except", "KeyError", ":", "self", ".", "log", ".", "info", "(", "'%s is not available for %s.'", ",", "env", ",", "region", ")", "continue", "pipeline_block_data", "[", "'region_subnets'", "]", "=", "region_subnets", "block", "=", "construct_pipeline_block", "(", "*", "*", "pipeline_block_data", ")", "pipelines", "[", "region", "]", "[", "'stages'", "]", ".", "extend", "(", "json", ".", "loads", "(", "block", ")", ")", "previous_env", "=", "env", "self", ".", "log", ".", "debug", "(", "'Assembled Pipelines:\\n%s'", ",", "pformat", "(", "pipelines", ")", ")", "for", "region", ",", "pipeline", "in", "pipelines", ".", "items", "(", ")", ":", "renumerate_stages", "(", "pipeline", ")", "self", ".", "post_pipeline", "(", "pipeline", ")", "return", "True" ]
Main wrapper for pipeline creation. 1. Runs clean_pipelines to clean up existing ones 2. determines which environments the pipeline needs 3. gets all subnets for template rendering 4. Renders all of the pipeline blocks as defined in configs 5. Runs post_pipeline to create pipeline
[ "Main", "wrapper", "for", "pipeline", "creation", ".", "1", ".", "Runs", "clean_pipelines", "to", "clean", "up", "existing", "ones", "2", ".", "determines", "which", "environments", "the", "pipeline", "needs", "3", ".", "gets", "all", "subnets", "for", "template", "rendering", "4", ".", "Renders", "all", "of", "the", "pipeline", "blocks", "as", "defined", "in", "configs", "5", ".", "Runs", "post_pipeline", "to", "create", "pipeline" ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/pipeline/create_pipeline.py#L193-L259
train
foremast/foremast
src/foremast/utils/lookups.py
ami_lookup
def ami_lookup(region='us-east-1', name='tomcat8'): """Look up AMI ID. Use _name_ to find AMI ID. If no ami_base_url or gitlab_token is provided, _name_ is returned as the ami id. Args: region (str): AWS Region to find AMI ID. name (str): Simple AMI base name to lookup. Returns: str: AMI ID for _name_ in _region_. """ if AMI_JSON_URL: ami_dict = _get_ami_dict(AMI_JSON_URL) ami_id = ami_dict[region][name] elif GITLAB_TOKEN: warn_user('Use AMI_JSON_URL feature instead.') ami_contents = _get_ami_file(region=region) ami_dict = json.loads(ami_contents) ami_id = ami_dict[name] else: ami_id = name LOG.info('Using AMI: %s', ami_id) return ami_id
python
def ami_lookup(region='us-east-1', name='tomcat8'): """Look up AMI ID. Use _name_ to find AMI ID. If no ami_base_url or gitlab_token is provided, _name_ is returned as the ami id. Args: region (str): AWS Region to find AMI ID. name (str): Simple AMI base name to lookup. Returns: str: AMI ID for _name_ in _region_. """ if AMI_JSON_URL: ami_dict = _get_ami_dict(AMI_JSON_URL) ami_id = ami_dict[region][name] elif GITLAB_TOKEN: warn_user('Use AMI_JSON_URL feature instead.') ami_contents = _get_ami_file(region=region) ami_dict = json.loads(ami_contents) ami_id = ami_dict[name] else: ami_id = name LOG.info('Using AMI: %s', ami_id) return ami_id
[ "def", "ami_lookup", "(", "region", "=", "'us-east-1'", ",", "name", "=", "'tomcat8'", ")", ":", "if", "AMI_JSON_URL", ":", "ami_dict", "=", "_get_ami_dict", "(", "AMI_JSON_URL", ")", "ami_id", "=", "ami_dict", "[", "region", "]", "[", "name", "]", "elif", "GITLAB_TOKEN", ":", "warn_user", "(", "'Use AMI_JSON_URL feature instead.'", ")", "ami_contents", "=", "_get_ami_file", "(", "region", "=", "region", ")", "ami_dict", "=", "json", ".", "loads", "(", "ami_contents", ")", "ami_id", "=", "ami_dict", "[", "name", "]", "else", ":", "ami_id", "=", "name", "LOG", ".", "info", "(", "'Using AMI: %s'", ",", "ami_id", ")", "return", "ami_id" ]
Look up AMI ID. Use _name_ to find AMI ID. If no ami_base_url or gitlab_token is provided, _name_ is returned as the ami id. Args: region (str): AWS Region to find AMI ID. name (str): Simple AMI base name to lookup. Returns: str: AMI ID for _name_ in _region_.
[ "Look", "up", "AMI", "ID", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L32-L59
train
foremast/foremast
src/foremast/utils/lookups.py
_get_ami_file
def _get_ami_file(region='us-east-1'): """Get file from Gitlab. Args: region (str): AWS Region to find AMI ID. Returns: str: Contents in json format. """ LOG.info("Getting AMI from Gitlab") lookup = FileLookup(git_short='devops/ansible') filename = 'scripts/{0}.json'.format(region) ami_contents = lookup.remote_file(filename=filename, branch='master') LOG.debug('AMI file contents in %s: %s', filename, ami_contents) return ami_contents
python
def _get_ami_file(region='us-east-1'): """Get file from Gitlab. Args: region (str): AWS Region to find AMI ID. Returns: str: Contents in json format. """ LOG.info("Getting AMI from Gitlab") lookup = FileLookup(git_short='devops/ansible') filename = 'scripts/{0}.json'.format(region) ami_contents = lookup.remote_file(filename=filename, branch='master') LOG.debug('AMI file contents in %s: %s', filename, ami_contents) return ami_contents
[ "def", "_get_ami_file", "(", "region", "=", "'us-east-1'", ")", ":", "LOG", ".", "info", "(", "\"Getting AMI from Gitlab\"", ")", "lookup", "=", "FileLookup", "(", "git_short", "=", "'devops/ansible'", ")", "filename", "=", "'scripts/{0}.json'", ".", "format", "(", "region", ")", "ami_contents", "=", "lookup", ".", "remote_file", "(", "filename", "=", "filename", ",", "branch", "=", "'master'", ")", "LOG", ".", "debug", "(", "'AMI file contents in %s: %s'", ",", "filename", ",", "ami_contents", ")", "return", "ami_contents" ]
Get file from Gitlab. Args: region (str): AWS Region to find AMI ID. Returns: str: Contents in json format.
[ "Get", "file", "from", "Gitlab", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L62-L77
train
foremast/foremast
src/foremast/utils/lookups.py
_get_ami_dict
def _get_ami_dict(json_url): """Get ami from a web url. Args: region (str): AWS Region to find AMI ID. Returns: dict: Contents in dictionary format. """ LOG.info("Getting AMI from %s", json_url) response = requests.get(json_url) assert response.ok, "Error getting ami info from {}".format(json_url) ami_dict = response.json() LOG.debug('AMI json contents: %s', ami_dict) return ami_dict
python
def _get_ami_dict(json_url): """Get ami from a web url. Args: region (str): AWS Region to find AMI ID. Returns: dict: Contents in dictionary format. """ LOG.info("Getting AMI from %s", json_url) response = requests.get(json_url) assert response.ok, "Error getting ami info from {}".format(json_url) ami_dict = response.json() LOG.debug('AMI json contents: %s', ami_dict) return ami_dict
[ "def", "_get_ami_dict", "(", "json_url", ")", ":", "LOG", ".", "info", "(", "\"Getting AMI from %s\"", ",", "json_url", ")", "response", "=", "requests", ".", "get", "(", "json_url", ")", "assert", "response", ".", "ok", ",", "\"Error getting ami info from {}\"", ".", "format", "(", "json_url", ")", "ami_dict", "=", "response", ".", "json", "(", ")", "LOG", ".", "debug", "(", "'AMI json contents: %s'", ",", "ami_dict", ")", "return", "ami_dict" ]
Get ami from a web url. Args: region (str): AWS Region to find AMI ID. Returns: dict: Contents in dictionary format.
[ "Get", "ami", "from", "a", "web", "url", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L80-L95
train
foremast/foremast
src/foremast/utils/lookups.py
FileLookup.get_gitlab_project
def get_gitlab_project(self): """Get numerical GitLab Project ID. Returns: int: Project ID number. Raises: foremast.exceptions.GitLabApiError: GitLab responded with bad status code. """ self.server = gitlab.Gitlab(GIT_URL, private_token=GITLAB_TOKEN, api_version=4) project = self.server.projects.get(self.git_short) if not project: raise GitLabApiError('Could not get Project "{0}" from GitLab API.'.format(self.git_short)) self.project = project return self.project
python
def get_gitlab_project(self): """Get numerical GitLab Project ID. Returns: int: Project ID number. Raises: foremast.exceptions.GitLabApiError: GitLab responded with bad status code. """ self.server = gitlab.Gitlab(GIT_URL, private_token=GITLAB_TOKEN, api_version=4) project = self.server.projects.get(self.git_short) if not project: raise GitLabApiError('Could not get Project "{0}" from GitLab API.'.format(self.git_short)) self.project = project return self.project
[ "def", "get_gitlab_project", "(", "self", ")", ":", "self", ".", "server", "=", "gitlab", ".", "Gitlab", "(", "GIT_URL", ",", "private_token", "=", "GITLAB_TOKEN", ",", "api_version", "=", "4", ")", "project", "=", "self", ".", "server", ".", "projects", ".", "get", "(", "self", ".", "git_short", ")", "if", "not", "project", ":", "raise", "GitLabApiError", "(", "'Could not get Project \"{0}\" from GitLab API.'", ".", "format", "(", "self", ".", "git_short", ")", ")", "self", ".", "project", "=", "project", "return", "self", ".", "project" ]
Get numerical GitLab Project ID. Returns: int: Project ID number. Raises: foremast.exceptions.GitLabApiError: GitLab responded with bad status code.
[ "Get", "numerical", "GitLab", "Project", "ID", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L121-L139
train
foremast/foremast
src/foremast/utils/lookups.py
FileLookup.local_file
def local_file(self, filename): """Read the local file in _self.runway_dir_. Args: filename (str): Name of file to retrieve relative to root of _runway_dir_. Returns: str: Contents of local file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.runway_dir) file_contents = '' file_path = os.path.join(self.runway_dir, filename) try: with open(file_path, 'rt') as lookup_file: file_contents = lookup_file.read() except FileNotFoundError: LOG.warning('File missing "%s".', file_path) raise LOG.debug('Local file contents:\n%s', file_contents) return file_contents
python
def local_file(self, filename): """Read the local file in _self.runway_dir_. Args: filename (str): Name of file to retrieve relative to root of _runway_dir_. Returns: str: Contents of local file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.runway_dir) file_contents = '' file_path = os.path.join(self.runway_dir, filename) try: with open(file_path, 'rt') as lookup_file: file_contents = lookup_file.read() except FileNotFoundError: LOG.warning('File missing "%s".', file_path) raise LOG.debug('Local file contents:\n%s', file_contents) return file_contents
[ "def", "local_file", "(", "self", ",", "filename", ")", ":", "LOG", ".", "info", "(", "'Retrieving \"%s\" from \"%s\".'", ",", "filename", ",", "self", ".", "runway_dir", ")", "file_contents", "=", "''", "file_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "runway_dir", ",", "filename", ")", "try", ":", "with", "open", "(", "file_path", ",", "'rt'", ")", "as", "lookup_file", ":", "file_contents", "=", "lookup_file", ".", "read", "(", ")", "except", "FileNotFoundError", ":", "LOG", ".", "warning", "(", "'File missing \"%s\".'", ",", "file_path", ")", "raise", "LOG", ".", "debug", "(", "'Local file contents:\\n%s'", ",", "file_contents", ")", "return", "file_contents" ]
Read the local file in _self.runway_dir_. Args: filename (str): Name of file to retrieve relative to root of _runway_dir_. Returns: str: Contents of local file. Raises: FileNotFoundError: Requested file missing.
[ "Read", "the", "local", "file", "in", "_self", ".", "runway_dir_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L141-L169
train
foremast/foremast
src/foremast/utils/lookups.py
FileLookup.remote_file
def remote_file(self, branch='master', filename=''): """Read the remote file on Git Server. Args: branch (str): Git Branch to find file. filename (str): Name of file to retrieve relative to root of repository. Returns: str: Contents of remote file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.git_short) file_contents = '' try: file_blob = self.project.files.get(file_path=filename, ref=branch) except gitlab.exceptions.GitlabGetError: file_blob = None LOG.debug('GitLab file response:\n%s', file_blob) if not file_blob: msg = 'Project "{0}" is missing file "{1}" in "{2}" branch.'.format(self.git_short, filename, branch) LOG.warning(msg) raise FileNotFoundError(msg) else: file_contents = b64decode(file_blob.content).decode() LOG.debug('Remote file contents:\n%s', file_contents) return file_contents
python
def remote_file(self, branch='master', filename=''): """Read the remote file on Git Server. Args: branch (str): Git Branch to find file. filename (str): Name of file to retrieve relative to root of repository. Returns: str: Contents of remote file. Raises: FileNotFoundError: Requested file missing. """ LOG.info('Retrieving "%s" from "%s".', filename, self.git_short) file_contents = '' try: file_blob = self.project.files.get(file_path=filename, ref=branch) except gitlab.exceptions.GitlabGetError: file_blob = None LOG.debug('GitLab file response:\n%s', file_blob) if not file_blob: msg = 'Project "{0}" is missing file "{1}" in "{2}" branch.'.format(self.git_short, filename, branch) LOG.warning(msg) raise FileNotFoundError(msg) else: file_contents = b64decode(file_blob.content).decode() LOG.debug('Remote file contents:\n%s', file_contents) return file_contents
[ "def", "remote_file", "(", "self", ",", "branch", "=", "'master'", ",", "filename", "=", "''", ")", ":", "LOG", ".", "info", "(", "'Retrieving \"%s\" from \"%s\".'", ",", "filename", ",", "self", ".", "git_short", ")", "file_contents", "=", "''", "try", ":", "file_blob", "=", "self", ".", "project", ".", "files", ".", "get", "(", "file_path", "=", "filename", ",", "ref", "=", "branch", ")", "except", "gitlab", ".", "exceptions", ".", "GitlabGetError", ":", "file_blob", "=", "None", "LOG", ".", "debug", "(", "'GitLab file response:\\n%s'", ",", "file_blob", ")", "if", "not", "file_blob", ":", "msg", "=", "'Project \"{0}\" is missing file \"{1}\" in \"{2}\" branch.'", ".", "format", "(", "self", ".", "git_short", ",", "filename", ",", "branch", ")", "LOG", ".", "warning", "(", "msg", ")", "raise", "FileNotFoundError", "(", "msg", ")", "else", ":", "file_contents", "=", "b64decode", "(", "file_blob", ".", "content", ")", ".", "decode", "(", ")", "LOG", ".", "debug", "(", "'Remote file contents:\\n%s'", ",", "file_contents", ")", "return", "file_contents" ]
Read the remote file on Git Server. Args: branch (str): Git Branch to find file. filename (str): Name of file to retrieve relative to root of repository. Returns: str: Contents of remote file. Raises: FileNotFoundError: Requested file missing.
[ "Read", "the", "remote", "file", "on", "Git", "Server", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/lookups.py#L171-L205
train
foremast/foremast
src/foremast/utils/banners.py
banner
def banner(text, border='=', width=80): """Center _text_ in a banner _width_ wide with _border_ characters. Args: text (str): What to write in the banner border (str): Border character width (int): How long the border should be """ text_padding = '{0:^%d}' % (width) LOG.info(border * width) LOG.info(text_padding.format(text)) LOG.info(border * width)
python
def banner(text, border='=', width=80): """Center _text_ in a banner _width_ wide with _border_ characters. Args: text (str): What to write in the banner border (str): Border character width (int): How long the border should be """ text_padding = '{0:^%d}' % (width) LOG.info(border * width) LOG.info(text_padding.format(text)) LOG.info(border * width)
[ "def", "banner", "(", "text", ",", "border", "=", "'='", ",", "width", "=", "80", ")", ":", "text_padding", "=", "'{0:^%d}'", "%", "(", "width", ")", "LOG", ".", "info", "(", "border", "*", "width", ")", "LOG", ".", "info", "(", "text_padding", ".", "format", "(", "text", ")", ")", "LOG", ".", "info", "(", "border", "*", "width", ")" ]
Center _text_ in a banner _width_ wide with _border_ characters. Args: text (str): What to write in the banner border (str): Border character width (int): How long the border should be
[ "Center", "_text_", "in", "a", "banner", "_width_", "wide", "with", "_border_", "characters", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/banners.py#L30-L41
train
foremast/foremast
src/foremast/utils/get_sns_topic_arn.py
get_sns_topic_arn
def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ if topic_name.count(':') == 5 and topic_name.startswith('arn:aws:sns:'): return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
python
def get_sns_topic_arn(topic_name, account, region): """Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name """ if topic_name.count(':') == 5 and topic_name.startswith('arn:aws:sns:'): return topic_name session = boto3.Session(profile_name=account, region_name=region) sns_client = session.client('sns') topics = sns_client.list_topics()['Topics'] matched_topic = None for topic in topics: topic_arn = topic['TopicArn'] if topic_name == topic_arn.split(':')[-1]: matched_topic = topic_arn break else: LOG.critical("No topic with name %s found.", topic_name) raise SNSTopicNotFound('No topic with name {0} found'.format(topic_name)) return matched_topic
[ "def", "get_sns_topic_arn", "(", "topic_name", ",", "account", ",", "region", ")", ":", "if", "topic_name", ".", "count", "(", "':'", ")", "==", "5", "and", "topic_name", ".", "startswith", "(", "'arn:aws:sns:'", ")", ":", "return", "topic_name", "session", "=", "boto3", ".", "Session", "(", "profile_name", "=", "account", ",", "region_name", "=", "region", ")", "sns_client", "=", "session", ".", "client", "(", "'sns'", ")", "topics", "=", "sns_client", ".", "list_topics", "(", ")", "[", "'Topics'", "]", "matched_topic", "=", "None", "for", "topic", "in", "topics", ":", "topic_arn", "=", "topic", "[", "'TopicArn'", "]", "if", "topic_name", "==", "topic_arn", ".", "split", "(", "':'", ")", "[", "-", "1", "]", ":", "matched_topic", "=", "topic_arn", "break", "else", ":", "LOG", ".", "critical", "(", "\"No topic with name %s found.\"", ",", "topic_name", ")", "raise", "SNSTopicNotFound", "(", "'No topic with name {0} found'", ".", "format", "(", "topic_name", ")", ")", "return", "matched_topic" ]
Get SNS topic ARN. Args: topic_name (str): Name of the topic to lookup. account (str): Environment, e.g. dev region (str): Region name, e.g. us-east-1 Returns: str: ARN for requested topic name
[ "Get", "SNS", "topic", "ARN", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/get_sns_topic_arn.py#L11-L39
train
foremast/foremast
src/foremast/slacknotify/slack_notification.py
SlackNotification.notify_slack_channel
def notify_slack_channel(self): """Post message to a defined Slack channel.""" message = get_template(template_file='slack/pipeline-prepare-ran.j2', info=self.info) if self.settings['pipeline']['notifications']['slack']: post_slack_message( message=message, channel=self.settings['pipeline']['notifications']['slack'], username='pipeline-bot', icon_emoji=':gear:')
python
def notify_slack_channel(self): """Post message to a defined Slack channel.""" message = get_template(template_file='slack/pipeline-prepare-ran.j2', info=self.info) if self.settings['pipeline']['notifications']['slack']: post_slack_message( message=message, channel=self.settings['pipeline']['notifications']['slack'], username='pipeline-bot', icon_emoji=':gear:')
[ "def", "notify_slack_channel", "(", "self", ")", ":", "message", "=", "get_template", "(", "template_file", "=", "'slack/pipeline-prepare-ran.j2'", ",", "info", "=", "self", ".", "info", ")", "if", "self", ".", "settings", "[", "'pipeline'", "]", "[", "'notifications'", "]", "[", "'slack'", "]", ":", "post_slack_message", "(", "message", "=", "message", ",", "channel", "=", "self", ".", "settings", "[", "'pipeline'", "]", "[", "'notifications'", "]", "[", "'slack'", "]", ",", "username", "=", "'pipeline-bot'", ",", "icon_emoji", "=", "':gear:'", ")" ]
Post message to a defined Slack channel.
[ "Post", "message", "to", "a", "defined", "Slack", "channel", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/slacknotify/slack_notification.py#L54-L63
train
foremast/foremast
src/foremast/utils/properties.py
get_properties
def get_properties(properties_file='raw.properties.json', env=None, region=None): """Get contents of _properties_file_ for the _env_. Args: properties_file (str): File name of `create-configs` JSON output. env (str): Environment to read optionally. region (str): Region to get specific configs for. Returns: dict: JSON loaded Application properties for _env_. None: Given _env_ was not found in `create-configs` JSON output. """ with open(properties_file, 'rt') as file_handle: properties = json.load(file_handle) env_properties = properties.get(env, properties) contents = env_properties.get(region, env_properties) LOG.debug('Found properties for %s:\n%s', env, contents) return contents
python
def get_properties(properties_file='raw.properties.json', env=None, region=None): """Get contents of _properties_file_ for the _env_. Args: properties_file (str): File name of `create-configs` JSON output. env (str): Environment to read optionally. region (str): Region to get specific configs for. Returns: dict: JSON loaded Application properties for _env_. None: Given _env_ was not found in `create-configs` JSON output. """ with open(properties_file, 'rt') as file_handle: properties = json.load(file_handle) env_properties = properties.get(env, properties) contents = env_properties.get(region, env_properties) LOG.debug('Found properties for %s:\n%s', env, contents) return contents
[ "def", "get_properties", "(", "properties_file", "=", "'raw.properties.json'", ",", "env", "=", "None", ",", "region", "=", "None", ")", ":", "with", "open", "(", "properties_file", ",", "'rt'", ")", "as", "file_handle", ":", "properties", "=", "json", ".", "load", "(", "file_handle", ")", "env_properties", "=", "properties", ".", "get", "(", "env", ",", "properties", ")", "contents", "=", "env_properties", ".", "get", "(", "region", ",", "env_properties", ")", "LOG", ".", "debug", "(", "'Found properties for %s:\\n%s'", ",", "env", ",", "contents", ")", "return", "contents" ]
Get contents of _properties_file_ for the _env_. Args: properties_file (str): File name of `create-configs` JSON output. env (str): Environment to read optionally. region (str): Region to get specific configs for. Returns: dict: JSON loaded Application properties for _env_. None: Given _env_ was not found in `create-configs` JSON output.
[ "Get", "contents", "of", "_properties_file_", "for", "the", "_env_", "." ]
fb70f29b8ce532f061685a17d120486e47b215ba
https://github.com/foremast/foremast/blob/fb70f29b8ce532f061685a17d120486e47b215ba/src/foremast/utils/properties.py#L23-L42
train