Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- data/lib/python3.10/site-packages/awscli/bcdoc/__init__.py +13 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/docevents.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/docstringparser.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/restdoc.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/style.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/textwriter.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/docevents.py +106 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/docstringparser.py +203 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/restdoc.py +240 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/style.py +418 -0
- data/lib/python3.10/site-packages/awscli/bcdoc/textwriter.py +799 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__init__.py +31 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/artifact_exporter.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/deploy.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/deployer.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/exceptions.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/package.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/yamlhelper.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/artifact_exporter.py +683 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/deploy.py +416 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/deployer.py +232 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/exceptions.py +59 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/package.py +181 -0
- data/lib/python3.10/site-packages/awscli/customizations/cloudformation/yamlhelper.py +104 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/__init__.py +12 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/constants.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/createdefaultrole.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/dlm.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/iam.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/constants.py +53 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/createdefaultrole.py +167 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/dlm.py +30 -0
- data/lib/python3.10/site-packages/awscli/customizations/dlm/iam.py +51 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/__init__.py +31 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/exceptions.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/get_token.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/kubeconfig.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/ordered_yaml.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/update_kubeconfig.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/exceptions.py +20 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/get_token.py +276 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/kubeconfig.py +281 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/ordered_yaml.py +62 -0
- data/lib/python3.10/site-packages/awscli/customizations/eks/update_kubeconfig.py +341 -0
- data/lib/python3.10/site-packages/awscli/customizations/emr/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/python3.10/site-packages/awscli/customizations/emr/__pycache__/addinstancegroups.cpython-310.pyc +0 -0
data/lib/python3.10/site-packages/awscli/bcdoc/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
__version__ = '0.16.0'
|
data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (184 Bytes). View file
|
|
|
data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/docevents.cpython-310.pyc
ADDED
|
Binary file (2.16 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/docstringparser.cpython-310.pyc
ADDED
|
Binary file (7.16 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/restdoc.cpython-310.pyc
ADDED
|
Binary file (8.95 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/style.cpython-310.pyc
ADDED
|
Binary file (13 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/bcdoc/__pycache__/textwriter.cpython-310.pyc
ADDED
|
Binary file (33.6 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/bcdoc/docevents.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
DOC_EVENTS = {
|
| 16 |
+
'doc-breadcrumbs': '.%s',
|
| 17 |
+
'doc-title': '.%s',
|
| 18 |
+
'doc-description': '.%s',
|
| 19 |
+
'doc-synopsis-start': '.%s',
|
| 20 |
+
'doc-synopsis-option': '.%s.%s',
|
| 21 |
+
'doc-synopsis-end': '.%s',
|
| 22 |
+
'doc-options-start': '.%s',
|
| 23 |
+
'doc-option': '.%s.%s',
|
| 24 |
+
'doc-option-example': '.%s.%s',
|
| 25 |
+
'doc-options-end': '.%s',
|
| 26 |
+
'doc-global-option': '.%s',
|
| 27 |
+
'doc-examples': '.%s',
|
| 28 |
+
'doc-output': '.%s',
|
| 29 |
+
'doc-subitems-start': '.%s',
|
| 30 |
+
'doc-subitem': '.%s.%s',
|
| 31 |
+
'doc-subitems-end': '.%s',
|
| 32 |
+
'doc-relateditems-start': '.%s',
|
| 33 |
+
'doc-relateditem': '.%s.%s',
|
| 34 |
+
'doc-relateditems-end': '.%s'
|
| 35 |
+
}
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def generate_events(session, help_command):
|
| 39 |
+
# Now generate the documentation events
|
| 40 |
+
session.emit('doc-breadcrumbs.%s' % help_command.event_class,
|
| 41 |
+
help_command=help_command)
|
| 42 |
+
session.emit('doc-title.%s' % help_command.event_class,
|
| 43 |
+
help_command=help_command)
|
| 44 |
+
session.emit('doc-description.%s' % help_command.event_class,
|
| 45 |
+
help_command=help_command)
|
| 46 |
+
session.emit('doc-synopsis-start.%s' % help_command.event_class,
|
| 47 |
+
help_command=help_command)
|
| 48 |
+
if help_command.arg_table:
|
| 49 |
+
for arg_name in help_command.arg_table:
|
| 50 |
+
# An argument can set an '_UNDOCUMENTED' attribute
|
| 51 |
+
# to True to indicate a parameter that exists
|
| 52 |
+
# but shouldn't be documented. This can be used
|
| 53 |
+
# for backwards compatibility of deprecated arguments.
|
| 54 |
+
if getattr(help_command.arg_table[arg_name],
|
| 55 |
+
'_UNDOCUMENTED', False):
|
| 56 |
+
continue
|
| 57 |
+
session.emit(
|
| 58 |
+
'doc-synopsis-option.%s.%s' % (help_command.event_class,
|
| 59 |
+
arg_name),
|
| 60 |
+
arg_name=arg_name, help_command=help_command)
|
| 61 |
+
session.emit('doc-synopsis-end.%s' % help_command.event_class,
|
| 62 |
+
help_command=help_command)
|
| 63 |
+
session.emit('doc-options-start.%s' % help_command.event_class,
|
| 64 |
+
help_command=help_command)
|
| 65 |
+
if help_command.arg_table:
|
| 66 |
+
for arg_name in help_command.arg_table:
|
| 67 |
+
if getattr(help_command.arg_table[arg_name],
|
| 68 |
+
'_UNDOCUMENTED', False):
|
| 69 |
+
continue
|
| 70 |
+
session.emit('doc-option.%s.%s' % (help_command.event_class,
|
| 71 |
+
arg_name),
|
| 72 |
+
arg_name=arg_name, help_command=help_command)
|
| 73 |
+
session.emit('doc-option-example.%s.%s' %
|
| 74 |
+
(help_command.event_class, arg_name),
|
| 75 |
+
arg_name=arg_name, help_command=help_command)
|
| 76 |
+
session.emit('doc-options-end.%s' % help_command.event_class,
|
| 77 |
+
help_command=help_command)
|
| 78 |
+
session.emit('doc-global-option.%s' % help_command.event_class,
|
| 79 |
+
help_command=help_command)
|
| 80 |
+
session.emit('doc-subitems-start.%s' % help_command.event_class,
|
| 81 |
+
help_command=help_command)
|
| 82 |
+
if help_command.command_table:
|
| 83 |
+
for command_name in sorted(help_command.command_table.keys()):
|
| 84 |
+
if hasattr(help_command.command_table[command_name],
|
| 85 |
+
'_UNDOCUMENTED'):
|
| 86 |
+
continue
|
| 87 |
+
session.emit('doc-subitem.%s.%s'
|
| 88 |
+
% (help_command.event_class, command_name),
|
| 89 |
+
command_name=command_name,
|
| 90 |
+
help_command=help_command)
|
| 91 |
+
session.emit('doc-subitems-end.%s' % help_command.event_class,
|
| 92 |
+
help_command=help_command)
|
| 93 |
+
session.emit('doc-examples.%s' % help_command.event_class,
|
| 94 |
+
help_command=help_command)
|
| 95 |
+
session.emit('doc-output.%s' % help_command.event_class,
|
| 96 |
+
help_command=help_command)
|
| 97 |
+
session.emit('doc-relateditems-start.%s' % help_command.event_class,
|
| 98 |
+
help_command=help_command)
|
| 99 |
+
if help_command.related_items:
|
| 100 |
+
for related_item in sorted(help_command.related_items):
|
| 101 |
+
session.emit('doc-relateditem.%s.%s'
|
| 102 |
+
% (help_command.event_class, related_item),
|
| 103 |
+
help_command=help_command,
|
| 104 |
+
related_item=related_item)
|
| 105 |
+
session.emit('doc-relateditems-end.%s' % help_command.event_class,
|
| 106 |
+
help_command=help_command)
|
data/lib/python3.10/site-packages/awscli/bcdoc/docstringparser.py
ADDED
|
@@ -0,0 +1,203 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
from html.parser import HTMLParser
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class DocStringParser(HTMLParser):
|
| 17 |
+
"""
|
| 18 |
+
A simple HTML parser. Focused on converting the subset of HTML
|
| 19 |
+
that appears in the documentation strings of the JSON models into
|
| 20 |
+
simple ReST format.
|
| 21 |
+
"""
|
| 22 |
+
|
| 23 |
+
def __init__(self, doc):
|
| 24 |
+
self.tree = None
|
| 25 |
+
self.doc = doc
|
| 26 |
+
HTMLParser.__init__(self)
|
| 27 |
+
|
| 28 |
+
def reset(self):
|
| 29 |
+
HTMLParser.reset(self)
|
| 30 |
+
self.tree = HTMLTree(self.doc)
|
| 31 |
+
|
| 32 |
+
def feed(self, data):
|
| 33 |
+
# HTMLParser is an old style class, so the super() method will not work.
|
| 34 |
+
HTMLParser.feed(self, data)
|
| 35 |
+
self.tree.write()
|
| 36 |
+
self.tree = HTMLTree(self.doc)
|
| 37 |
+
|
| 38 |
+
def close(self):
|
| 39 |
+
HTMLParser.close(self)
|
| 40 |
+
# Write if there is anything remaining.
|
| 41 |
+
self.tree.write()
|
| 42 |
+
self.tree = HTMLTree(self.doc)
|
| 43 |
+
|
| 44 |
+
def handle_starttag(self, tag, attrs):
|
| 45 |
+
self.tree.add_tag(tag, attrs=attrs)
|
| 46 |
+
|
| 47 |
+
def handle_endtag(self, tag):
|
| 48 |
+
self.tree.add_tag(tag, is_start=False)
|
| 49 |
+
|
| 50 |
+
def handle_data(self, data):
|
| 51 |
+
self.tree.add_data(data)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class HTMLTree:
|
| 55 |
+
"""
|
| 56 |
+
A tree which handles HTML nodes. Designed to work with a python HTML parser,
|
| 57 |
+
meaning that the current_node will be the most recently opened tag. When
|
| 58 |
+
a tag is closed, the current_node moves up to the parent node.
|
| 59 |
+
"""
|
| 60 |
+
|
| 61 |
+
def __init__(self, doc):
|
| 62 |
+
self.doc = doc
|
| 63 |
+
self.head = StemNode()
|
| 64 |
+
self.current_node = self.head
|
| 65 |
+
self.unhandled_tags = []
|
| 66 |
+
|
| 67 |
+
def add_tag(self, tag, attrs=None, is_start=True):
|
| 68 |
+
if not self._doc_has_handler(tag, is_start):
|
| 69 |
+
self.unhandled_tags.append(tag)
|
| 70 |
+
return
|
| 71 |
+
|
| 72 |
+
if is_start:
|
| 73 |
+
if tag == 'li':
|
| 74 |
+
node = LineItemNode(attrs)
|
| 75 |
+
else:
|
| 76 |
+
node = TagNode(tag, attrs)
|
| 77 |
+
self.current_node.add_child(node)
|
| 78 |
+
self.current_node = node
|
| 79 |
+
else:
|
| 80 |
+
self.current_node = self.current_node.parent
|
| 81 |
+
|
| 82 |
+
def _doc_has_handler(self, tag, is_start):
|
| 83 |
+
if is_start:
|
| 84 |
+
handler_name = 'start_%s' % tag
|
| 85 |
+
else:
|
| 86 |
+
handler_name = 'end_%s' % tag
|
| 87 |
+
|
| 88 |
+
return hasattr(self.doc.style, handler_name)
|
| 89 |
+
|
| 90 |
+
def add_data(self, data):
|
| 91 |
+
self.current_node.add_child(DataNode(data))
|
| 92 |
+
|
| 93 |
+
def write(self):
|
| 94 |
+
self.head.write(self.doc)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class Node:
|
| 98 |
+
def __init__(self, parent=None):
|
| 99 |
+
self.parent = parent
|
| 100 |
+
|
| 101 |
+
def write(self, doc):
|
| 102 |
+
raise NotImplementedError
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class StemNode(Node):
|
| 106 |
+
def __init__(self, parent=None):
|
| 107 |
+
super().__init__(parent)
|
| 108 |
+
self.children = []
|
| 109 |
+
|
| 110 |
+
def add_child(self, child):
|
| 111 |
+
child.parent = self
|
| 112 |
+
self.children.append(child)
|
| 113 |
+
|
| 114 |
+
def write(self, doc):
|
| 115 |
+
self._write_children(doc)
|
| 116 |
+
|
| 117 |
+
def _write_children(self, doc):
|
| 118 |
+
for child in self.children:
|
| 119 |
+
child.write(doc)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class TagNode(StemNode):
|
| 123 |
+
"""
|
| 124 |
+
A generic Tag node. It will verify that handlers exist before writing.
|
| 125 |
+
"""
|
| 126 |
+
|
| 127 |
+
def __init__(self, tag, attrs=None, parent=None):
|
| 128 |
+
super().__init__(parent)
|
| 129 |
+
self.attrs = attrs
|
| 130 |
+
self.tag = tag
|
| 131 |
+
|
| 132 |
+
def write(self, doc):
|
| 133 |
+
self._write_start(doc)
|
| 134 |
+
self._write_children(doc)
|
| 135 |
+
self._write_end(doc)
|
| 136 |
+
|
| 137 |
+
def _write_start(self, doc):
|
| 138 |
+
handler_name = 'start_%s' % self.tag
|
| 139 |
+
if hasattr(doc.style, handler_name):
|
| 140 |
+
getattr(doc.style, handler_name)(self.attrs)
|
| 141 |
+
|
| 142 |
+
def _write_end(self, doc):
|
| 143 |
+
handler_name = 'end_%s' % self.tag
|
| 144 |
+
if hasattr(doc.style, handler_name):
|
| 145 |
+
getattr(doc.style, handler_name)()
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
class LineItemNode(TagNode):
|
| 149 |
+
def __init__(self, attrs=None, parent=None):
|
| 150 |
+
super().__init__('li', attrs, parent)
|
| 151 |
+
|
| 152 |
+
def write(self, doc):
|
| 153 |
+
self._lstrip(self)
|
| 154 |
+
super().write(doc)
|
| 155 |
+
|
| 156 |
+
def _lstrip(self, node):
|
| 157 |
+
"""
|
| 158 |
+
Traverses the tree, stripping out whitespace until text data is found
|
| 159 |
+
:param node: The node to strip
|
| 160 |
+
:return: True if non-whitespace data was found, False otherwise
|
| 161 |
+
"""
|
| 162 |
+
for child in node.children:
|
| 163 |
+
if isinstance(child, DataNode):
|
| 164 |
+
child.lstrip()
|
| 165 |
+
if child.data:
|
| 166 |
+
return True
|
| 167 |
+
else:
|
| 168 |
+
found = self._lstrip(child)
|
| 169 |
+
if found:
|
| 170 |
+
return True
|
| 171 |
+
|
| 172 |
+
return False
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
class DataNode(Node):
|
| 176 |
+
"""
|
| 177 |
+
A Node that contains only string data.
|
| 178 |
+
"""
|
| 179 |
+
|
| 180 |
+
def __init__(self, data, parent=None):
|
| 181 |
+
super().__init__(parent)
|
| 182 |
+
if not isinstance(data, str):
|
| 183 |
+
raise ValueError("Expecting string type, %s given." % type(data))
|
| 184 |
+
self.data = data
|
| 185 |
+
|
| 186 |
+
def lstrip(self):
|
| 187 |
+
self.data = self.data.lstrip()
|
| 188 |
+
|
| 189 |
+
def write(self, doc):
|
| 190 |
+
if not self.data:
|
| 191 |
+
return
|
| 192 |
+
|
| 193 |
+
if self.data.isspace():
|
| 194 |
+
str_data = ' '
|
| 195 |
+
else:
|
| 196 |
+
end_space = self.data[-1].isspace()
|
| 197 |
+
words = self.data.split()
|
| 198 |
+
words = doc.translate_words(words)
|
| 199 |
+
str_data = ' '.join(words)
|
| 200 |
+
if end_space:
|
| 201 |
+
str_data += ' '
|
| 202 |
+
|
| 203 |
+
doc.handle_data(str_data)
|
data/lib/python3.10/site-packages/awscli/bcdoc/restdoc.py
ADDED
|
@@ -0,0 +1,240 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
import logging
|
| 14 |
+
|
| 15 |
+
from botocore.compat import OrderedDict
|
| 16 |
+
from awscli.bcdoc.docstringparser import DocStringParser
|
| 17 |
+
from awscli.bcdoc.style import ReSTStyle
|
| 18 |
+
|
| 19 |
+
LOG = logging.getLogger('bcdocs')
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class ReSTDocument(object):
|
| 23 |
+
|
| 24 |
+
def __init__(self, target='man'):
|
| 25 |
+
self.style = ReSTStyle(self)
|
| 26 |
+
self.target = target
|
| 27 |
+
self.parser = DocStringParser(self)
|
| 28 |
+
self.keep_data = True
|
| 29 |
+
self.do_translation = False
|
| 30 |
+
self.translation_map = {}
|
| 31 |
+
self.hrefs = {}
|
| 32 |
+
self._writes = []
|
| 33 |
+
self._last_doc_string = None
|
| 34 |
+
|
| 35 |
+
def _write(self, s):
|
| 36 |
+
if self.keep_data and s is not None:
|
| 37 |
+
self._writes.append(s)
|
| 38 |
+
|
| 39 |
+
def write(self, content):
|
| 40 |
+
"""
|
| 41 |
+
Write content into the document.
|
| 42 |
+
"""
|
| 43 |
+
self._write(content)
|
| 44 |
+
|
| 45 |
+
def writeln(self, content):
|
| 46 |
+
"""
|
| 47 |
+
Write content on a newline.
|
| 48 |
+
"""
|
| 49 |
+
self._write('%s%s\n' % (self.style.spaces(), content))
|
| 50 |
+
|
| 51 |
+
def peek_write(self):
|
| 52 |
+
"""
|
| 53 |
+
Returns the last content written to the document without
|
| 54 |
+
removing it from the stack.
|
| 55 |
+
"""
|
| 56 |
+
return self._writes[-1]
|
| 57 |
+
|
| 58 |
+
def pop_write(self):
|
| 59 |
+
"""
|
| 60 |
+
Removes and returns the last content written to the stack.
|
| 61 |
+
"""
|
| 62 |
+
return self._writes.pop()
|
| 63 |
+
|
| 64 |
+
def push_write(self, s):
|
| 65 |
+
"""
|
| 66 |
+
Places new content on the stack.
|
| 67 |
+
"""
|
| 68 |
+
self._writes.append(s)
|
| 69 |
+
|
| 70 |
+
def find_last_write(self, content):
|
| 71 |
+
"""
|
| 72 |
+
Returns the index of the last occurrence of the content argument
|
| 73 |
+
in the stack, or returns None if content is not on the stack.
|
| 74 |
+
"""
|
| 75 |
+
try:
|
| 76 |
+
return len(self._writes) - self._writes[::-1].index(content) - 1
|
| 77 |
+
except ValueError:
|
| 78 |
+
return None
|
| 79 |
+
|
| 80 |
+
def insert_write(self, index, content):
|
| 81 |
+
"""
|
| 82 |
+
Inserts the content argument to the stack directly before the
|
| 83 |
+
supplied index.
|
| 84 |
+
"""
|
| 85 |
+
self._writes.insert(index, content)
|
| 86 |
+
|
| 87 |
+
def getvalue(self):
|
| 88 |
+
"""
|
| 89 |
+
Returns the current content of the document as a string.
|
| 90 |
+
"""
|
| 91 |
+
if self.hrefs:
|
| 92 |
+
self.style.new_paragraph()
|
| 93 |
+
for refname, link in self.hrefs.items():
|
| 94 |
+
self.style.link_target_definition(refname, link)
|
| 95 |
+
return ''.join(self._writes).encode('utf-8')
|
| 96 |
+
|
| 97 |
+
def translate_words(self, words):
|
| 98 |
+
return [self.translation_map.get(w, w) for w in words]
|
| 99 |
+
|
| 100 |
+
def handle_data(self, data):
|
| 101 |
+
if data and self.keep_data:
|
| 102 |
+
self._write(data)
|
| 103 |
+
|
| 104 |
+
def include_doc_string(self, doc_string):
|
| 105 |
+
if doc_string:
|
| 106 |
+
try:
|
| 107 |
+
start = len(self._writes)
|
| 108 |
+
self.parser.feed(doc_string)
|
| 109 |
+
self.parser.close()
|
| 110 |
+
end = len(self._writes)
|
| 111 |
+
self._last_doc_string = (start, end)
|
| 112 |
+
except Exception:
|
| 113 |
+
LOG.debug('Error parsing doc string', exc_info=True)
|
| 114 |
+
LOG.debug(doc_string)
|
| 115 |
+
|
| 116 |
+
def remove_last_doc_string(self):
|
| 117 |
+
# Removes all writes inserted by last doc string
|
| 118 |
+
if self._last_doc_string is not None:
|
| 119 |
+
start, end = self._last_doc_string
|
| 120 |
+
del self._writes[start:end]
|
| 121 |
+
|
| 122 |
+
def write_from_file(self, filename):
|
| 123 |
+
with open(filename, 'r') as f:
|
| 124 |
+
for line in f.readlines():
|
| 125 |
+
self.writeln(line.strip())
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class DocumentStructure(ReSTDocument):
|
| 129 |
+
def __init__(self, name, section_names=None, target='man', context=None):
|
| 130 |
+
"""Provides a Hierarichial structure to a ReSTDocument
|
| 131 |
+
|
| 132 |
+
You can write to it similiar to as you can to a ReSTDocument but
|
| 133 |
+
has an innate structure for more orginaztion and abstraction.
|
| 134 |
+
|
| 135 |
+
:param name: The name of the document
|
| 136 |
+
:param section_names: A list of sections to be included
|
| 137 |
+
in the document.
|
| 138 |
+
:param target: The target documentation of the Document structure
|
| 139 |
+
:param context: A dictionary of data to store with the strucuture. These
|
| 140 |
+
are only stored per section not the entire structure.
|
| 141 |
+
"""
|
| 142 |
+
super(DocumentStructure, self).__init__(target=target)
|
| 143 |
+
self._name = name
|
| 144 |
+
self._structure = OrderedDict()
|
| 145 |
+
self._path = [self._name]
|
| 146 |
+
self._context = {}
|
| 147 |
+
if context is not None:
|
| 148 |
+
self._context = context
|
| 149 |
+
if section_names is not None:
|
| 150 |
+
self._generate_structure(section_names)
|
| 151 |
+
|
| 152 |
+
@property
|
| 153 |
+
def name(self):
|
| 154 |
+
"""The name of the document structure"""
|
| 155 |
+
return self._name
|
| 156 |
+
|
| 157 |
+
@property
|
| 158 |
+
def path(self):
|
| 159 |
+
"""
|
| 160 |
+
A list of where to find a particular document structure in the
|
| 161 |
+
overlying document structure.
|
| 162 |
+
"""
|
| 163 |
+
return self._path
|
| 164 |
+
|
| 165 |
+
@path.setter
|
| 166 |
+
def path(self, value):
|
| 167 |
+
self._path = value
|
| 168 |
+
|
| 169 |
+
@property
|
| 170 |
+
def available_sections(self):
|
| 171 |
+
return list(self._structure)
|
| 172 |
+
|
| 173 |
+
@property
|
| 174 |
+
def context(self):
|
| 175 |
+
return self._context
|
| 176 |
+
|
| 177 |
+
def _generate_structure(self, section_names):
|
| 178 |
+
for section_name in section_names:
|
| 179 |
+
self.add_new_section(section_name)
|
| 180 |
+
|
| 181 |
+
def add_new_section(self, name, context=None):
|
| 182 |
+
"""Adds a new section to the current document structure
|
| 183 |
+
|
| 184 |
+
This document structure will be considered a section to the
|
| 185 |
+
current document structure but will in itself be an entirely
|
| 186 |
+
new document structure that can be written to and have sections
|
| 187 |
+
as well
|
| 188 |
+
|
| 189 |
+
:param name: The name of the section.
|
| 190 |
+
:param context: A dictionary of data to store with the strucuture. These
|
| 191 |
+
are only stored per section not the entire structure.
|
| 192 |
+
:rtype: DocumentStructure
|
| 193 |
+
:returns: A new document structure to add to but lives as a section
|
| 194 |
+
to the document structure it was instantiated from.
|
| 195 |
+
"""
|
| 196 |
+
# Add a new section
|
| 197 |
+
section = self.__class__(name=name, target=self.target,
|
| 198 |
+
context=context)
|
| 199 |
+
section.path = self.path + [name]
|
| 200 |
+
# Indent the section apporpriately as well
|
| 201 |
+
section.style.indentation = self.style.indentation
|
| 202 |
+
section.translation_map = self.translation_map
|
| 203 |
+
section.hrefs = self.hrefs
|
| 204 |
+
self._structure[name] = section
|
| 205 |
+
return section
|
| 206 |
+
|
| 207 |
+
def get_section(self, name):
|
| 208 |
+
"""Retrieve a section"""
|
| 209 |
+
return self._structure[name]
|
| 210 |
+
|
| 211 |
+
def delete_section(self, name):
|
| 212 |
+
"""Delete a section"""
|
| 213 |
+
del self._structure[name]
|
| 214 |
+
|
| 215 |
+
def flush_structure(self):
|
| 216 |
+
"""Flushes a doc structure to a ReSTructed string
|
| 217 |
+
|
| 218 |
+
The document is flushed out in a DFS style where sections and their
|
| 219 |
+
subsections' values are added to the string as they are visited.
|
| 220 |
+
"""
|
| 221 |
+
# We are at the root flush the links at the beginning of the
|
| 222 |
+
# document
|
| 223 |
+
if len(self.path) == 1:
|
| 224 |
+
if self.hrefs:
|
| 225 |
+
self.style.new_paragraph()
|
| 226 |
+
for refname, link in self.hrefs.items():
|
| 227 |
+
self.style.link_target_definition(refname, link)
|
| 228 |
+
value = self.getvalue()
|
| 229 |
+
for name, section in self._structure.items():
|
| 230 |
+
value += section.flush_structure()
|
| 231 |
+
return value
|
| 232 |
+
|
| 233 |
+
def getvalue(self):
|
| 234 |
+
return ''.join(self._writes).encode('utf-8')
|
| 235 |
+
|
| 236 |
+
def remove_all_sections(self):
|
| 237 |
+
self._structure = OrderedDict()
|
| 238 |
+
|
| 239 |
+
def clear_text(self):
|
| 240 |
+
self._writes = []
|
data/lib/python3.10/site-packages/awscli/bcdoc/style.py
ADDED
|
@@ -0,0 +1,418 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import logging
|
| 15 |
+
|
| 16 |
+
logger = logging.getLogger('bcdocs')
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class BaseStyle(object):
    """No-op formatting backend shared by the concrete style classes.

    Tracks an indentation level for the attached document and exposes
    markup hooks (bold, headings, refs, ...) that subclasses override;
    the base versions simply return their input unchanged.
    """

    def __init__(self, doc, indent_width=2):
        self.doc = doc
        self.indent_width = indent_width
        self._indent = 0
        self.keep_data = True

    @property
    def indentation(self):
        """Current indentation level (number of indent steps)."""
        return self._indent

    @indentation.setter
    def indentation(self, level):
        self._indent = level

    def new_paragraph(self):
        return '\n' + self.spaces()

    def indent(self):
        self._indent += 1

    def dedent(self):
        # Never dedent past the left margin.
        if self._indent > 0:
            self._indent -= 1

    def spaces(self):
        """Return the whitespace prefix for the current indentation level."""
        return self.indent_width * self._indent * ' '

    def bold(self, s):
        return s

    def ref(self, link, title=None):
        return link

    def h2(self, s):
        return s

    def h3(self, s):
        return s

    def underline(self, s):
        return s

    def italics(self, s):
        return s
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class ReSTStyle(BaseStyle):
    """Style that renders document events as reStructuredText markup.

    All output is pushed onto ``self.doc`` (a write-stack document
    object providing ``write``/``writeln``/``pop_write``/``push_write``).
    """

    def __init__(self, doc, indent_width=2):
        BaseStyle.__init__(self, doc, indent_width)
        # Whether <p> tags should emit paragraph breaks (disabled inside
        # list items, where paragraphs would break the bullet layout).
        self.do_p = True
        # Target URL of the <a> tag currently being rendered, if any.
        self.a_href = None
        # Current nesting depth of <ul>/<ol> lists.
        self.list_depth = 0

    def new_paragraph(self):
        self.doc.write('\n\n%s' % self.spaces())

    def new_line(self):
        self.doc.write('\n%s' % self.spaces())

    def _start_inline(self, markup):
        self.doc.write(markup)

    def _end_inline(self, markup):
        # Sometimes the HTML markup has whitespace between the end
        # of the text inside the inline markup and the closing element
        # (e.g. <b>foobar </b>). This trailing space will cause
        # problems in the ReST inline markup so we remove it here
        # by popping the last item written off the stack, striping
        # the whitespace and then pushing it back on the stack.
        last_write = self.doc.pop_write().rstrip(' ')

        # Sometimes, for whatever reason, a tag like <b/> is present. This
        # is problematic because if we simply translate that directly then
        # we end up with something like ****, which rst will assume is a
        # heading instead of an empty bold.
        if last_write == markup:
            return

        self.doc.push_write(last_write)
        self.doc.write(markup + ' ')

    def start_bold(self, attrs=None):
        self._start_inline('**')

    def end_bold(self):
        self._end_inline('**')

    def start_b(self, attrs=None):
        self.doc.do_translation = True
        self.start_bold(attrs)

    def end_b(self):
        self.doc.do_translation = False
        self.end_bold()

    def bold(self, s):
        if s:
            self.start_bold()
            self.doc.write(s)
            self.end_bold()

    def ref(self, title, link=None):
        if link is None:
            link = title
        self.doc.write(':doc:`%s <%s>`' % (title, link))

    def _heading(self, s, border_char):
        # ReST headings use an over- and under-line of the border char,
        # exactly as wide as the title text.
        border = border_char * len(s)
        self.new_paragraph()
        self.doc.write('%s\n%s\n%s' % (border, s, border))
        self.new_paragraph()

    def h1(self, s):
        self._heading(s, '*')

    def h2(self, s):
        self._heading(s, '=')

    def h3(self, s):
        self._heading(s, '-')

    def start_italics(self, attrs=None):
        self._start_inline('*')

    def end_italics(self):
        self._end_inline('*')

    def italics(self, s):
        if s:
            self.start_italics()
            self.doc.write(s)
            self.end_italics()

    def start_p(self, attrs=None):
        if self.do_p:
            self.doc.write('\n\n%s' % self.spaces())

    def end_p(self):
        if self.do_p:
            self.doc.write('\n\n%s' % self.spaces())

    def start_code(self, attrs=None):
        self.doc.do_translation = True
        self._start_inline('``')

    def end_code(self):
        self.doc.do_translation = False
        self._end_inline('``')

    def code(self, s):
        if s:
            self.start_code()
            self.doc.write(s)
            self.end_code()

    def start_note(self, attrs=None):
        self.new_paragraph()
        self.doc.write('.. note::')
        self.indent()
        self.new_paragraph()

    def end_note(self):
        self.dedent()
        self.new_paragraph()

    def start_important(self, attrs=None):
        self.new_paragraph()
        self.doc.write('.. warning::')
        self.indent()
        self.new_paragraph()

    def end_important(self):
        self.dedent()
        self.new_paragraph()

    def start_danger(self, attrs=None):
        self.new_paragraph()
        self.doc.write('.. danger::')
        self.indent()
        self.new_paragraph()

    def end_danger(self):
        self.dedent()
        self.new_paragraph()

    def start_a(self, attrs=None):
        if attrs:
            for attr_key, attr_value in attrs:
                if attr_key == 'href':
                    self.a_href = attr_value
                    self.doc.write('`')
        else:
            # There are some model documentation that
            # looks like this: <a>DescribeInstances</a>.
            # In this case we just write out an empty
            # string.
            self.doc.write(' ')
        self.doc.do_translation = True

    def link_target_definition(self, refname, link):
        self.doc.writeln('.. _%s: %s' % (refname, link))

    def sphinx_reference_label(self, label, text=None):
        if text is None:
            text = label
        if self.doc.target == 'html':
            self.doc.write(':ref:`%s <%s>`' % (text, label))
        else:
            self.doc.write(text)

    def end_a(self):
        self.doc.do_translation = False
        if self.a_href:
            last_write = self.doc.pop_write()
            last_write = last_write.rstrip(' ')
            if last_write and last_write != '`':
                # Escape colons in the link text so ReST does not treat
                # them as interpreted-text roles.
                if ':' in last_write:
                    last_write = last_write.replace(':', r'\:')
                self.doc.push_write(last_write)
                self.doc.push_write(' <%s>`__' % self.a_href)
            elif last_write == '`':
                # Look at start_a(). It will do a self.doc.write('`')
                # which is the start of the link title. If that is the
                # case then there was no link text. We should just
                # use an inline link. The syntax of this is
                # `<http://url>`_
                self.doc.push_write('`<%s>`__' % self.a_href)
            else:
                self.doc.push_write(self.a_href)
                self.doc.hrefs[self.a_href] = self.a_href
                self.doc.write('`__')
            self.a_href = None
        self.doc.write(' ')

    def start_i(self, attrs=None):
        self.doc.do_translation = True
        self.start_italics()

    def end_i(self):
        self.doc.do_translation = False
        self.end_italics()

    def start_li(self, attrs=None):
        self.new_line()
        self.do_p = False
        self.doc.write('* ')

    def end_li(self):
        self.do_p = True
        self.new_line()

    def li(self, s):
        if s:
            self.start_li()
            self.doc.writeln(s)
            self.end_li()

    def start_ul(self, attrs=None):
        # Nested lists are indented one level; the outermost list is not.
        if self.list_depth != 0:
            self.indent()
        self.list_depth += 1
        self.new_paragraph()

    def end_ul(self):
        self.list_depth -= 1
        if self.list_depth != 0:
            self.dedent()
        self.new_paragraph()

    def start_ol(self, attrs=None):
        # TODO: Need to control the bullets used for LI items
        if self.list_depth != 0:
            self.indent()
        self.list_depth += 1
        self.new_paragraph()

    def end_ol(self):
        self.list_depth -= 1
        if self.list_depth != 0:
            self.dedent()
        self.new_paragraph()

    def start_examples(self, attrs=None):
        self.doc.keep_data = False

    def end_examples(self):
        self.doc.keep_data = True

    def start_fullname(self, attrs=None):
        self.doc.keep_data = False

    def end_fullname(self):
        self.doc.keep_data = True

    def start_codeblock(self, attrs=None):
        self.doc.write('::')
        self.indent()
        self.new_paragraph()

    def end_codeblock(self):
        self.dedent()
        self.new_paragraph()

    def codeblock(self, code):
        """
        Literal code blocks are introduced by ending a paragraph with
        the special marker ::. The literal block must be indented
        (and, like all paragraphs, separated from the surrounding
        ones by blank lines).
        """
        self.start_codeblock()
        self.doc.writeln(code)
        self.end_codeblock()

    def toctree(self):
        if self.doc.target == 'html':
            self.doc.write('\n.. toctree::\n')
            self.doc.write('  :maxdepth: 1\n')
            self.doc.write('  :titlesonly:\n\n')
        else:
            self.start_ul()

    def tocitem(self, item, file_name=None):
        if self.doc.target == 'man':
            self.li(item)
        else:
            if file_name:
                self.doc.writeln('  %s' % file_name)
            else:
                self.doc.writeln('  %s' % item)

    def hidden_toctree(self):
        if self.doc.target == 'html':
            self.doc.write('\n.. toctree::\n')
            self.doc.write('  :maxdepth: 1\n')
            self.doc.write('  :hidden:\n\n')

    def hidden_tocitem(self, item):
        if self.doc.target == 'html':
            self.tocitem(item)

    def table_of_contents(self, title=None, depth=None):
        self.doc.write('.. contents:: ')
        if title is not None:
            self.doc.writeln(title)
        if depth is not None:
            self.doc.writeln('   :depth: %s' % depth)

    def start_sphinx_py_class(self, class_name):
        self.new_paragraph()
        self.doc.write('.. py:class:: %s' % class_name)
        self.indent()
        self.new_paragraph()

    def end_sphinx_py_class(self):
        self.dedent()
        self.new_paragraph()

    def start_sphinx_py_method(self, method_name, parameters=None):
        self.new_paragraph()
        content = '.. py:method:: %s' % method_name
        if parameters is not None:
            content += '(%s)' % parameters
        self.doc.write(content)
        self.indent()
        self.new_paragraph()

    def end_sphinx_py_method(self):
        self.dedent()
        self.new_paragraph()

    def start_sphinx_py_attr(self, attr_name):
        self.new_paragraph()
        self.doc.write('.. py:attribute:: %s' % attr_name)
        self.indent()
        self.new_paragraph()

    def end_sphinx_py_attr(self):
        self.dedent()
        self.new_paragraph()

    def write_py_doc_string(self, docstring):
        docstring_lines = docstring.splitlines()
        for docstring_line in docstring_lines:
            self.doc.writeln(docstring_line)

    def external_link(self, title, link):
        if self.doc.target == 'html':
            self.doc.write('`%s <%s>`_' % (title, link))
        else:
            self.doc.write(title)

    def internal_link(self, title, page):
        if self.doc.target == 'html':
            self.doc.write(':doc:`%s <%s>`' % (title, page))
        else:
            self.doc.write(title)
|
data/lib/python3.10/site-packages/awscli/bcdoc/textwriter.py
ADDED
|
@@ -0,0 +1,799 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
|
| 4 |
+
Custom docutils writer for plain text.
|
| 5 |
+
Based heavily on the Sphinx text writer. See copyright below.
|
| 6 |
+
|
| 7 |
+
:copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS.
|
| 8 |
+
:license: BSD, see LICENSE for details.
|
| 9 |
+
|
| 10 |
+
"""
|
| 11 |
+
import os
|
| 12 |
+
import re
|
| 13 |
+
import textwrap
|
| 14 |
+
|
| 15 |
+
from docutils import nodes, writers
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class TextWrapper(textwrap.TextWrapper):
    """Custom subclass that uses a different word separator regex."""

    # Overrides textwrap's default separator so ReST constructs
    # (interpreted text, hyphenated words, em-dashes) survive wrapping.
    wordsep_re = re.compile(
        r'(\s+|'                                  # any whitespace
        r'(?<=\s)(?::[a-z-]+:)?`\S+|'             # interpreted text start
        r'[^\s\w]*\w+[a-zA-Z]-(?=\w+[a-zA-Z])|'   # hyphenated words
        r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))')   # em-dash


MAXWIDTH = 70
STDINDENT = 3


def my_wrap(text, width=MAXWIDTH, **kwargs):
    """Wrap *text* to *width* columns using the ReST-aware TextWrapper."""
    wrapper = TextWrapper(width=width, **kwargs)
    return wrapper.wrap(text)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class TextWriter(writers.Writer):
    """docutils Writer that renders a document tree as plain text."""

    supported = ('text',)
    settings_spec = ('No options here.', '', ())
    settings_defaults = {}

    # Set by translate(); holds the rendered text of the last document.
    output = None

    def __init__(self):
        writers.Writer.__init__(self)

    def translate(self):
        # Walk the parsed document tree; the visitor accumulates the
        # plain-text rendering in its ``body`` attribute.
        visitor = TextTranslator(self.document)
        self.document.walkabout(visitor)
        self.output = visitor.body
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class TextTranslator(nodes.NodeVisitor):
|
| 54 |
+
sectionchars = '*=-~"+`'
|
| 55 |
+
|
| 56 |
+
def __init__(self, document):
    nodes.NodeVisitor.__init__(self, document)

    self.nl = os.linesep
    # Stack of "states": each state is a list of (indent, text) items
    # collected for the block currently being rendered.
    self.states = [[]]
    # Parallel stack of per-state indentation amounts.
    self.stateindent = [0]
    self.list_counter = []
    # Current section nesting depth; indexes into sectionchars.
    self.sectionlevel = 0
    self.table = None

def add_text(self, text):
    # An indent of -1 marks raw text that still needs wrapping when the
    # state is closed in end_state().
    self.states[-1].append((-1, text))

def new_state(self, indent=STDINDENT):
    self.states.append([])
    self.stateindent.append(indent)

def end_state(self, wrap=True, end=[''], first=None):
    """Close the current state and fold its content into the parent.

    Runs of raw (-1) text are joined and (optionally) word-wrapped to
    the remaining width; already-formatted items keep their own indent.
    *first* prefixes the first resulting line (used for footnote/citation
    labels).
    """
    # NOTE(review): ``end=['']`` is a shared mutable default; it is only
    # read here (never mutated), so this is safe — confirm before changing.
    content = self.states.pop()
    maxindent = sum(self.stateindent)
    indent = self.stateindent.pop()
    result = []
    toformat = []

    def do_format():
        if not toformat:
            return
        if wrap:
            res = my_wrap(''.join(toformat), width=MAXWIDTH-maxindent)
        else:
            res = ''.join(toformat).splitlines()
        if end:
            res += end
        result.append((indent, res))
    for itemindent, item in content:
        if itemindent == -1:
            toformat.append(item)
        else:
            do_format()
            result.append((indent + itemindent, item))
            toformat = []
    do_format()
    if first is not None and result:
        itemindent, item = result[0]
        if item:
            result.insert(0, (itemindent - indent, [first + item[0]]))
            result[1] = (itemindent, item[1:])
    self.states[-1].extend(result)
|
| 104 |
+
|
| 105 |
+
def visit_document(self, node):
    self.new_state(0)

def depart_document(self, node):
    self.end_state()
    # Flatten the fully-reduced root state into the final text body;
    # blank lines stay blank (no trailing indent spaces).
    self.body = self.nl.join(line and (' '*indent + line)
                             for indent, lines in self.states[0]
                             for line in lines)
    # XXX header/footer?

def visit_highlightlang(self, node):
    raise nodes.SkipNode

def visit_section(self, node):
    # Remember which underline character this nesting depth uses.
    self._title_char = self.sectionchars[self.sectionlevel]
    self.sectionlevel += 1

def depart_section(self, node):
    self.sectionlevel -= 1

def visit_topic(self, node):
    self.new_state(0)

def depart_topic(self, node):
    self.end_state()

visit_sidebar = visit_topic
depart_sidebar = depart_topic

def visit_rubric(self, node):
    self.new_state(0)
    self.add_text('-[ ')

def depart_rubric(self, node):
    self.add_text(' ]-')
    self.end_state()

def visit_compound(self, node):
    pass

def depart_compound(self, node):
    pass

def visit_glossary(self, node):
    pass

def depart_glossary(self, node):
    pass

def visit_title(self, node):
    # Admonition titles are rendered inline ("Note: ..."), not underlined.
    if isinstance(node.parent, nodes.Admonition):
        self.add_text(node.astext()+': ')
        raise nodes.SkipNode
    self.new_state(0)

def depart_title(self, node):
    if isinstance(node.parent, nodes.section):
        char = self._title_char
    else:
        char = '^'
    text = ''.join(x[1] for x in self.states.pop() if x[0] == -1)
    self.stateindent.pop()
    # Emit the title with an underline of matching width.
    self.states[-1].append((0, ['', text, '%s' % (char * len(text)), '']))
|
| 168 |
+
|
| 169 |
+
def visit_subtitle(self, node):
    pass

def depart_subtitle(self, node):
    pass

def visit_attribution(self, node):
    self.add_text('-- ')

def depart_attribution(self, node):
    pass

def visit_desc(self, node):
    pass

def depart_desc(self, node):
    pass

def visit_desc_signature(self, node):
    self.new_state(0)
    # Prefix class/exception signatures with their object type.
    if node.parent['objtype'] in ('class', 'exception'):
        self.add_text('%s ' % node.parent['objtype'])

def depart_desc_signature(self, node):
    # XXX: wrap signatures in a way that makes sense
    self.end_state(wrap=False, end=None)

def visit_desc_name(self, node):
    pass

def depart_desc_name(self, node):
    pass

def visit_desc_addname(self, node):
    pass

def depart_desc_addname(self, node):
    pass

def visit_desc_type(self, node):
    pass

def depart_desc_type(self, node):
    pass

def visit_desc_returns(self, node):
    self.add_text(' -> ')

def depart_desc_returns(self, node):
    pass

def visit_desc_parameterlist(self, node):
    self.add_text('(')
    # Tracks whether a ", " separator is needed before the next parameter.
    self.first_param = 1

def depart_desc_parameterlist(self, node):
    self.add_text(')')

def visit_desc_parameter(self, node):
    if not self.first_param:
        self.add_text(', ')
    else:
        self.first_param = 0
    self.add_text(node.astext())
    raise nodes.SkipNode

def visit_desc_optional(self, node):
    self.add_text('[')

def depart_desc_optional(self, node):
    self.add_text(']')

def visit_desc_annotation(self, node):
    pass

def depart_desc_annotation(self, node):
    pass

def visit_refcount(self, node):
    pass

def depart_refcount(self, node):
    pass

def visit_desc_content(self, node):
    self.new_state()
    self.add_text(self.nl)

def depart_desc_content(self, node):
    self.end_state()
|
| 259 |
+
|
| 260 |
+
def visit_figure(self, node):
    self.new_state()

def depart_figure(self, node):
    self.end_state()

def visit_caption(self, node):
    pass

def depart_caption(self, node):
    pass

def visit_productionlist(self, node):
    self.new_state()
    names = []
    for production in node:
        names.append(production['tokenname'])
    # Right-pad token names so the '::=' columns line up.
    maxlen = max(len(name) for name in names)
    for production in node:
        if production['tokenname']:
            self.add_text(production['tokenname'].ljust(maxlen) + ' ::=')
            lastname = production['tokenname']
        else:
            # Continuation line: align under the previous token name.
            self.add_text('%s    ' % (' '*len(lastname)))
        self.add_text(production.astext() + self.nl)
    self.end_state(wrap=False)
    raise nodes.SkipNode

def visit_seealso(self, node):
    self.new_state()

def depart_seealso(self, node):
    self.end_state(first='')

def visit_footnote(self, node):
    self._footnote = node.children[0].astext().strip()
    self.new_state(len(self._footnote) + 3)

def depart_footnote(self, node):
    self.end_state(first='[%s] ' % self._footnote)

def visit_citation(self, node):
    if len(node) and isinstance(node[0], nodes.label):
        self._citlabel = node[0].astext()
    else:
        self._citlabel = ''
    self.new_state(len(self._citlabel) + 3)

def depart_citation(self, node):
    self.end_state(first='[%s] ' % self._citlabel)

def visit_label(self, node):
    raise nodes.SkipNode

# XXX: option list could use some better styling

def visit_option_list(self, node):
    pass

def depart_option_list(self, node):
    pass

def visit_option_list_item(self, node):
    self.new_state(0)

def depart_option_list_item(self, node):
    self.end_state()

def visit_option_group(self, node):
    self._firstoption = True

def depart_option_group(self, node):
    self.add_text('     ')

def visit_option(self, node):
    if self._firstoption:
        self._firstoption = False
    else:
        self.add_text(', ')

def depart_option(self, node):
    pass

def visit_option_string(self, node):
    pass

def depart_option_string(self, node):
    pass

def visit_option_argument(self, node):
    self.add_text(node['delimiter'])

def depart_option_argument(self, node):
    pass

def visit_description(self, node):
    pass

def depart_description(self, node):
    pass

def visit_tabular_col_spec(self, node):
    raise nodes.SkipNode

def visit_colspec(self, node):
    # self.table[0] collects column widths for the current table.
    self.table[0].append(node['colwidth'])
    raise nodes.SkipNode

def visit_tgroup(self, node):
    pass

def depart_tgroup(self, node):
    pass

def visit_thead(self, node):
    pass

def depart_thead(self, node):
    pass

def visit_tbody(self, node):
    # 'sep' marks the header/body separator row in the table buffer.
    self.table.append('sep')

def depart_tbody(self, node):
    pass

def visit_row(self, node):
    self.table.append([])

def depart_row(self, node):
    pass

def visit_entry(self, node):
    if 'morerows' in node or 'morecols' in node:
        raise NotImplementedError('Column or row spanning cells are '
                                  'not implemented.')
    self.new_state(0)
|
| 397 |
+
|
| 398 |
+
def depart_entry(self, node):
|
| 399 |
+
text = self.nl.join(self.nl.join(x[1]) for x in self.states.pop())
|
| 400 |
+
self.stateindent.pop()
|
| 401 |
+
self.table[-1].append(text)
|
| 402 |
+
|
| 403 |
+
def visit_table(self, node):
|
| 404 |
+
if self.table:
|
| 405 |
+
raise NotImplementedError('Nested tables are not supported.')
|
| 406 |
+
self.new_state(0)
|
| 407 |
+
self.table = [[]]
|
| 408 |
+
|
| 409 |
+
def depart_table(self, node):
|
| 410 |
+
lines = self.table[1:]
|
| 411 |
+
fmted_rows = []
|
| 412 |
+
colwidths = self.table[0]
|
| 413 |
+
realwidths = colwidths[:]
|
| 414 |
+
separator = 0
|
| 415 |
+
# don't allow paragraphs in table cells for now
|
| 416 |
+
for line in lines:
|
| 417 |
+
if line == 'sep':
|
| 418 |
+
separator = len(fmted_rows)
|
| 419 |
+
else:
|
| 420 |
+
cells = []
|
| 421 |
+
for i, cell in enumerate(line):
|
| 422 |
+
par = my_wrap(cell, width=colwidths[i])
|
| 423 |
+
if par:
|
| 424 |
+
maxwidth = max(map(len, par))
|
| 425 |
+
else:
|
| 426 |
+
maxwidth = 0
|
| 427 |
+
realwidths[i] = max(realwidths[i], maxwidth)
|
| 428 |
+
cells.append(par)
|
| 429 |
+
fmted_rows.append(cells)
|
| 430 |
+
|
| 431 |
+
def writesep(char='-'):
|
| 432 |
+
out = ['+']
|
| 433 |
+
for width in realwidths:
|
| 434 |
+
out.append(char * (width+2))
|
| 435 |
+
out.append('+')
|
| 436 |
+
self.add_text(''.join(out) + self.nl)
|
| 437 |
+
|
| 438 |
+
def writerow(row):
|
| 439 |
+
lines = zip(*row)
|
| 440 |
+
for line in lines:
|
| 441 |
+
out = ['|']
|
| 442 |
+
for i, cell in enumerate(line):
|
| 443 |
+
if cell:
|
| 444 |
+
out.append(' ' + cell.ljust(realwidths[i]+1))
|
| 445 |
+
else:
|
| 446 |
+
out.append(' ' * (realwidths[i] + 2))
|
| 447 |
+
out.append('|')
|
| 448 |
+
self.add_text(''.join(out) + self.nl)
|
| 449 |
+
|
| 450 |
+
for i, row in enumerate(fmted_rows):
|
| 451 |
+
if separator and i == separator:
|
| 452 |
+
writesep('=')
|
| 453 |
+
else:
|
| 454 |
+
writesep('-')
|
| 455 |
+
writerow(row)
|
| 456 |
+
writesep('-')
|
| 457 |
+
self.table = None
|
| 458 |
+
self.end_state(wrap=False)
|
| 459 |
+
|
| 460 |
+
def visit_acks(self, node):
|
| 461 |
+
self.new_state(0)
|
| 462 |
+
self.add_text(
|
| 463 |
+
', '.join(n.astext() for n in node.children[0].children) + '.')
|
| 464 |
+
self.end_state()
|
| 465 |
+
raise nodes.SkipNode
|
| 466 |
+
|
| 467 |
+
def visit_image(self, node):
|
| 468 |
+
if 'alt' in node.attributes:
|
| 469 |
+
self.add_text(_('[image: %s]') % node['alt'])
|
| 470 |
+
self.add_text(_('[image]'))
|
| 471 |
+
raise nodes.SkipNode
|
| 472 |
+
|
| 473 |
+
def visit_transition(self, node):
|
| 474 |
+
indent = sum(self.stateindent)
|
| 475 |
+
self.new_state(0)
|
| 476 |
+
self.add_text('=' * (MAXWIDTH - indent))
|
| 477 |
+
self.end_state()
|
| 478 |
+
raise nodes.SkipNode
|
| 479 |
+
|
| 480 |
+
def visit_bullet_list(self, node):
|
| 481 |
+
self.list_counter.append(-1)
|
| 482 |
+
|
| 483 |
+
def depart_bullet_list(self, node):
|
| 484 |
+
self.list_counter.pop()
|
| 485 |
+
|
| 486 |
+
def visit_enumerated_list(self, node):
|
| 487 |
+
self.list_counter.append(0)
|
| 488 |
+
|
| 489 |
+
def depart_enumerated_list(self, node):
|
| 490 |
+
self.list_counter.pop()
|
| 491 |
+
|
| 492 |
+
def visit_definition_list(self, node):
|
| 493 |
+
self.list_counter.append(-2)
|
| 494 |
+
|
| 495 |
+
def depart_definition_list(self, node):
|
| 496 |
+
self.list_counter.pop()
|
| 497 |
+
|
| 498 |
+
def visit_list_item(self, node):
|
| 499 |
+
if self.list_counter[-1] == -1:
|
| 500 |
+
# bullet list
|
| 501 |
+
self.new_state(2)
|
| 502 |
+
elif self.list_counter[-1] == -2:
|
| 503 |
+
# definition list
|
| 504 |
+
pass
|
| 505 |
+
else:
|
| 506 |
+
# enumerated list
|
| 507 |
+
self.list_counter[-1] += 1
|
| 508 |
+
self.new_state(len(str(self.list_counter[-1])) + 2)
|
| 509 |
+
|
| 510 |
+
def depart_list_item(self, node):
|
| 511 |
+
if self.list_counter[-1] == -1:
|
| 512 |
+
self.end_state(first='* ', end=None)
|
| 513 |
+
elif self.list_counter[-1] == -2:
|
| 514 |
+
pass
|
| 515 |
+
else:
|
| 516 |
+
self.end_state(first='%s. ' % self.list_counter[-1], end=None)
|
| 517 |
+
|
| 518 |
+
def visit_definition_list_item(self, node):
|
| 519 |
+
self._li_has_classifier = len(node) >= 2 and \
|
| 520 |
+
isinstance(node[1], nodes.classifier)
|
| 521 |
+
|
| 522 |
+
def depart_definition_list_item(self, node):
|
| 523 |
+
pass
|
| 524 |
+
|
| 525 |
+
def visit_term(self, node):
|
| 526 |
+
self.new_state(0)
|
| 527 |
+
|
| 528 |
+
def depart_term(self, node):
|
| 529 |
+
if not self._li_has_classifier:
|
| 530 |
+
self.end_state(end=None)
|
| 531 |
+
|
| 532 |
+
def visit_termsep(self, node):
|
| 533 |
+
self.add_text(', ')
|
| 534 |
+
raise nodes.SkipNode
|
| 535 |
+
|
| 536 |
+
def visit_classifier(self, node):
|
| 537 |
+
self.add_text(' : ')
|
| 538 |
+
|
| 539 |
+
def depart_classifier(self, node):
|
| 540 |
+
self.end_state(end=None)
|
| 541 |
+
|
| 542 |
+
def visit_definition(self, node):
|
| 543 |
+
self.new_state()
|
| 544 |
+
|
| 545 |
+
def depart_definition(self, node):
|
| 546 |
+
self.end_state()
|
| 547 |
+
|
| 548 |
+
def visit_field_list(self, node):
|
| 549 |
+
pass
|
| 550 |
+
|
| 551 |
+
def depart_field_list(self, node):
|
| 552 |
+
pass
|
| 553 |
+
|
| 554 |
+
def visit_field(self, node):
|
| 555 |
+
pass
|
| 556 |
+
|
| 557 |
+
def depart_field(self, node):
|
| 558 |
+
pass
|
| 559 |
+
|
| 560 |
+
def visit_field_name(self, node):
|
| 561 |
+
self.new_state(0)
|
| 562 |
+
|
| 563 |
+
def depart_field_name(self, node):
|
| 564 |
+
self.add_text(':')
|
| 565 |
+
self.end_state(end=None)
|
| 566 |
+
|
| 567 |
+
def visit_field_body(self, node):
|
| 568 |
+
self.new_state()
|
| 569 |
+
|
| 570 |
+
def depart_field_body(self, node):
|
| 571 |
+
self.end_state()
|
| 572 |
+
|
| 573 |
+
def visit_centered(self, node):
|
| 574 |
+
pass
|
| 575 |
+
|
| 576 |
+
def depart_centered(self, node):
|
| 577 |
+
pass
|
| 578 |
+
|
| 579 |
+
def visit_hlist(self, node):
|
| 580 |
+
pass
|
| 581 |
+
|
| 582 |
+
def depart_hlist(self, node):
|
| 583 |
+
pass
|
| 584 |
+
|
| 585 |
+
def visit_hlistcol(self, node):
|
| 586 |
+
pass
|
| 587 |
+
|
| 588 |
+
def depart_hlistcol(self, node):
|
| 589 |
+
pass
|
| 590 |
+
|
| 591 |
+
def visit_admonition(self, node):
|
| 592 |
+
self.new_state(0)
|
| 593 |
+
|
| 594 |
+
def depart_admonition(self, node):
|
| 595 |
+
self.end_state()
|
| 596 |
+
|
| 597 |
+
def visit_versionmodified(self, node):
|
| 598 |
+
self.new_state(0)
|
| 599 |
+
|
| 600 |
+
def depart_versionmodified(self, node):
|
| 601 |
+
self.end_state()
|
| 602 |
+
|
| 603 |
+
def visit_literal_block(self, node):
|
| 604 |
+
self.new_state()
|
| 605 |
+
|
| 606 |
+
def depart_literal_block(self, node):
|
| 607 |
+
self.end_state(wrap=False)
|
| 608 |
+
|
| 609 |
+
def visit_doctest_block(self, node):
|
| 610 |
+
self.new_state(0)
|
| 611 |
+
|
| 612 |
+
def depart_doctest_block(self, node):
|
| 613 |
+
self.end_state(wrap=False)
|
| 614 |
+
|
| 615 |
+
def visit_line_block(self, node):
|
| 616 |
+
self.new_state(0)
|
| 617 |
+
|
| 618 |
+
def depart_line_block(self, node):
|
| 619 |
+
self.end_state(wrap=False)
|
| 620 |
+
|
| 621 |
+
def visit_line(self, node):
|
| 622 |
+
pass
|
| 623 |
+
|
| 624 |
+
def depart_line(self, node):
|
| 625 |
+
pass
|
| 626 |
+
|
| 627 |
+
def visit_block_quote(self, node):
|
| 628 |
+
self.new_state()
|
| 629 |
+
|
| 630 |
+
def depart_block_quote(self, node):
|
| 631 |
+
self.end_state()
|
| 632 |
+
|
| 633 |
+
def visit_compact_paragraph(self, node):
|
| 634 |
+
pass
|
| 635 |
+
|
| 636 |
+
def depart_compact_paragraph(self, node):
|
| 637 |
+
pass
|
| 638 |
+
|
| 639 |
+
def visit_paragraph(self, node):
|
| 640 |
+
self.new_state(0)
|
| 641 |
+
|
| 642 |
+
def depart_paragraph(self, node):
|
| 643 |
+
self.end_state()
|
| 644 |
+
|
| 645 |
+
def visit_target(self, node):
|
| 646 |
+
raise nodes.SkipNode
|
| 647 |
+
|
| 648 |
+
def visit_index(self, node):
|
| 649 |
+
raise nodes.SkipNode
|
| 650 |
+
|
| 651 |
+
def visit_substitution_definition(self, node):
|
| 652 |
+
raise nodes.SkipNode
|
| 653 |
+
|
| 654 |
+
def visit_pending_xref(self, node):
|
| 655 |
+
pass
|
| 656 |
+
|
| 657 |
+
def depart_pending_xref(self, node):
|
| 658 |
+
pass
|
| 659 |
+
|
| 660 |
+
def visit_reference(self, node):
|
| 661 |
+
pass
|
| 662 |
+
|
| 663 |
+
def depart_reference(self, node):
|
| 664 |
+
pass
|
| 665 |
+
|
| 666 |
+
def visit_download_reference(self, node):
|
| 667 |
+
pass
|
| 668 |
+
|
| 669 |
+
def depart_download_reference(self, node):
|
| 670 |
+
pass
|
| 671 |
+
|
| 672 |
+
def visit_emphasis(self, node):
|
| 673 |
+
self.add_text('*')
|
| 674 |
+
|
| 675 |
+
def depart_emphasis(self, node):
|
| 676 |
+
self.add_text('*')
|
| 677 |
+
|
| 678 |
+
def visit_literal_emphasis(self, node):
|
| 679 |
+
self.add_text('*')
|
| 680 |
+
|
| 681 |
+
def depart_literal_emphasis(self, node):
|
| 682 |
+
self.add_text('*')
|
| 683 |
+
|
| 684 |
+
def visit_strong(self, node):
|
| 685 |
+
self.add_text('**')
|
| 686 |
+
|
| 687 |
+
def depart_strong(self, node):
|
| 688 |
+
self.add_text('**')
|
| 689 |
+
|
| 690 |
+
def visit_abbreviation(self, node):
|
| 691 |
+
self.add_text('')
|
| 692 |
+
|
| 693 |
+
def depart_abbreviation(self, node):
|
| 694 |
+
if node.hasattr('explanation'):
|
| 695 |
+
self.add_text(' (%s)' % node['explanation'])
|
| 696 |
+
|
| 697 |
+
def visit_title_reference(self, node):
|
| 698 |
+
self.add_text('*')
|
| 699 |
+
|
| 700 |
+
def depart_title_reference(self, node):
|
| 701 |
+
self.add_text('*')
|
| 702 |
+
|
| 703 |
+
def visit_literal(self, node):
|
| 704 |
+
self.add_text('"')
|
| 705 |
+
|
| 706 |
+
def depart_literal(self, node):
|
| 707 |
+
self.add_text('"')
|
| 708 |
+
|
| 709 |
+
def visit_subscript(self, node):
|
| 710 |
+
self.add_text('_')
|
| 711 |
+
|
| 712 |
+
def depart_subscript(self, node):
|
| 713 |
+
pass
|
| 714 |
+
|
| 715 |
+
def visit_superscript(self, node):
|
| 716 |
+
self.add_text('^')
|
| 717 |
+
|
| 718 |
+
def depart_superscript(self, node):
|
| 719 |
+
pass
|
| 720 |
+
|
| 721 |
+
def visit_footnote_reference(self, node):
|
| 722 |
+
self.add_text('[%s]' % node.astext())
|
| 723 |
+
raise nodes.SkipNode
|
| 724 |
+
|
| 725 |
+
def visit_citation_reference(self, node):
|
| 726 |
+
self.add_text('[%s]' % node.astext())
|
| 727 |
+
raise nodes.SkipNode
|
| 728 |
+
|
| 729 |
+
def visit_Text(self, node):
|
| 730 |
+
self.add_text(node.astext())
|
| 731 |
+
|
| 732 |
+
def depart_Text(self, node):
|
| 733 |
+
pass
|
| 734 |
+
|
| 735 |
+
def visit_generated(self, node):
|
| 736 |
+
pass
|
| 737 |
+
|
| 738 |
+
def depart_generated(self, node):
|
| 739 |
+
pass
|
| 740 |
+
|
| 741 |
+
def visit_inline(self, node):
|
| 742 |
+
pass
|
| 743 |
+
|
| 744 |
+
def depart_inline(self, node):
|
| 745 |
+
pass
|
| 746 |
+
|
| 747 |
+
def visit_problematic(self, node):
|
| 748 |
+
self.add_text('>>')
|
| 749 |
+
|
| 750 |
+
def depart_problematic(self, node):
|
| 751 |
+
self.add_text('<<')
|
| 752 |
+
|
| 753 |
+
def visit_system_message(self, node):
|
| 754 |
+
self.new_state(0)
|
| 755 |
+
self.add_text('<SYSTEM MESSAGE: %s>' % node.astext())
|
| 756 |
+
self.end_state()
|
| 757 |
+
raise nodes.SkipNode
|
| 758 |
+
|
| 759 |
+
def visit_comment(self, node):
|
| 760 |
+
raise nodes.SkipNode
|
| 761 |
+
|
| 762 |
+
def visit_meta(self, node):
|
| 763 |
+
# only valid for HTML
|
| 764 |
+
raise nodes.SkipNode
|
| 765 |
+
|
| 766 |
+
def visit_raw(self, node):
|
| 767 |
+
if 'text' in node.get('format', '').split():
|
| 768 |
+
self.body.append(node.astext())
|
| 769 |
+
raise nodes.SkipNode
|
| 770 |
+
|
| 771 |
+
def _visit_admonition(self, node):
|
| 772 |
+
self.new_state(2)
|
| 773 |
+
|
| 774 |
+
def _make_depart_admonition(name):
|
| 775 |
+
def depart_admonition(self, node):
|
| 776 |
+
self.end_state(first=name.capitalize() + ': ')
|
| 777 |
+
return depart_admonition
|
| 778 |
+
|
| 779 |
+
visit_attention = _visit_admonition
|
| 780 |
+
depart_attention = _make_depart_admonition('attention')
|
| 781 |
+
visit_caution = _visit_admonition
|
| 782 |
+
depart_caution = _make_depart_admonition('caution')
|
| 783 |
+
visit_danger = _visit_admonition
|
| 784 |
+
depart_danger = _make_depart_admonition('danger')
|
| 785 |
+
visit_error = _visit_admonition
|
| 786 |
+
depart_error = _make_depart_admonition('error')
|
| 787 |
+
visit_hint = _visit_admonition
|
| 788 |
+
depart_hint = _make_depart_admonition('hint')
|
| 789 |
+
visit_important = _visit_admonition
|
| 790 |
+
depart_important = _make_depart_admonition('important')
|
| 791 |
+
visit_note = _visit_admonition
|
| 792 |
+
depart_note = _make_depart_admonition('note')
|
| 793 |
+
visit_tip = _visit_admonition
|
| 794 |
+
depart_tip = _make_depart_admonition('tip')
|
| 795 |
+
visit_warning = _visit_admonition
|
| 796 |
+
depart_warning = _make_depart_admonition('warning')
|
| 797 |
+
|
| 798 |
+
def unknown_visit(self, node):
|
| 799 |
+
raise NotImplementedError('Unknown node: ' + node.__class__.__name__)
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__init__.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
from awscli.customizations.cloudformation.package import PackageCommand
|
| 14 |
+
from awscli.customizations.cloudformation.deploy import DeployCommand
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def initialize(cli):
|
| 18 |
+
"""
|
| 19 |
+
The entry point for CloudFormation high level commands.
|
| 20 |
+
"""
|
| 21 |
+
cli.register('building-command-table.cloudformation', inject_commands)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def inject_commands(command_table, session, **kwargs):
|
| 25 |
+
"""
|
| 26 |
+
Called when the CloudFormation command table is being built. Used to
|
| 27 |
+
inject new high level commands into the command list. These high level
|
| 28 |
+
commands must not collide with existing low-level API call names.
|
| 29 |
+
"""
|
| 30 |
+
command_table['package'] = PackageCommand(session)
|
| 31 |
+
command_table['deploy'] = DeployCommand(session)
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (1 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/artifact_exporter.cpython-310.pyc
ADDED
|
Binary file (17.4 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/deploy.cpython-310.pyc
ADDED
|
Binary file (9.36 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/deployer.cpython-310.pyc
ADDED
|
Binary file (6.48 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/exceptions.cpython-310.pyc
ADDED
|
Binary file (2.84 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/package.cpython-310.pyc
ADDED
|
Binary file (4.12 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/__pycache__/yamlhelper.cpython-310.pyc
ADDED
|
Binary file (2.6 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/artifact_exporter.py
ADDED
|
@@ -0,0 +1,683 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import logging
|
| 15 |
+
import os
|
| 16 |
+
import tempfile
|
| 17 |
+
import zipfile
|
| 18 |
+
import contextlib
|
| 19 |
+
import uuid
|
| 20 |
+
import shutil
|
| 21 |
+
from botocore.utils import set_value_from_jmespath
|
| 22 |
+
|
| 23 |
+
from awscli.compat import urlparse
|
| 24 |
+
from contextlib import contextmanager
|
| 25 |
+
from awscli.customizations.cloudformation import exceptions
|
| 26 |
+
from awscli.customizations.cloudformation.yamlhelper import yaml_dump, \
|
| 27 |
+
yaml_parse
|
| 28 |
+
import jmespath
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
LOG = logging.getLogger(__name__)
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def is_path_value_valid(path):
|
| 35 |
+
return isinstance(path, str)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def make_abs_path(directory, path):
|
| 39 |
+
if is_path_value_valid(path) and not os.path.isabs(path):
|
| 40 |
+
return os.path.normpath(os.path.join(directory, path))
|
| 41 |
+
else:
|
| 42 |
+
return path
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def is_s3_url(url):
|
| 46 |
+
try:
|
| 47 |
+
parse_s3_url(url)
|
| 48 |
+
return True
|
| 49 |
+
except ValueError:
|
| 50 |
+
return False
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def is_local_folder(path):
|
| 54 |
+
return is_path_value_valid(path) and os.path.isdir(path)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def is_local_file(path):
|
| 58 |
+
return is_path_value_valid(path) and os.path.isfile(path)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def is_zip_file(path):
|
| 62 |
+
return (
|
| 63 |
+
is_path_value_valid(path) and
|
| 64 |
+
zipfile.is_zipfile(path))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def parse_s3_url(url,
|
| 68 |
+
bucket_name_property="Bucket",
|
| 69 |
+
object_key_property="Key",
|
| 70 |
+
version_property=None):
|
| 71 |
+
|
| 72 |
+
if isinstance(url, str) \
|
| 73 |
+
and url.startswith("s3://"):
|
| 74 |
+
|
| 75 |
+
# Python < 2.7.10 don't parse query parameters from URI with custom
|
| 76 |
+
# scheme such as s3://blah/blah. As a workaround, remove scheme
|
| 77 |
+
# altogether to trigger the parser "s3://foo/bar?v=1" =>"//foo/bar?v=1"
|
| 78 |
+
parsed = urlparse.urlparse(url[3:])
|
| 79 |
+
query = urlparse.parse_qs(parsed.query)
|
| 80 |
+
|
| 81 |
+
if parsed.netloc and parsed.path:
|
| 82 |
+
result = dict()
|
| 83 |
+
result[bucket_name_property] = parsed.netloc
|
| 84 |
+
result[object_key_property] = parsed.path.lstrip('/')
|
| 85 |
+
|
| 86 |
+
# If there is a query string that has a single versionId field,
|
| 87 |
+
# set the object version and return
|
| 88 |
+
if version_property is not None \
|
| 89 |
+
and 'versionId' in query \
|
| 90 |
+
and len(query['versionId']) == 1:
|
| 91 |
+
result[version_property] = query['versionId'][0]
|
| 92 |
+
|
| 93 |
+
return result
|
| 94 |
+
|
| 95 |
+
raise ValueError("URL given to the parse method is not a valid S3 url "
|
| 96 |
+
"{0}".format(url))
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def upload_local_artifacts(resource_id, resource_dict, property_name,
|
| 100 |
+
parent_dir, uploader):
|
| 101 |
+
"""
|
| 102 |
+
Upload local artifacts referenced by the property at given resource and
|
| 103 |
+
return S3 URL of the uploaded object. It is the responsibility of callers
|
| 104 |
+
to ensure property value is a valid string
|
| 105 |
+
|
| 106 |
+
If path refers to a file, this method will upload the file. If path refers
|
| 107 |
+
to a folder, this method will zip the folder and upload the zip to S3.
|
| 108 |
+
If path is omitted, this method will zip the current working folder and
|
| 109 |
+
upload.
|
| 110 |
+
|
| 111 |
+
If path is already a path to S3 object, this method does nothing.
|
| 112 |
+
|
| 113 |
+
:param resource_id: Id of the CloudFormation resource
|
| 114 |
+
:param resource_dict: Dictionary containing resource definition
|
| 115 |
+
:param property_name: Property name of CloudFormation resource where this
|
| 116 |
+
local path is present
|
| 117 |
+
:param parent_dir: Resolve all relative paths with respect to this
|
| 118 |
+
directory
|
| 119 |
+
:param uploader: Method to upload files to S3
|
| 120 |
+
|
| 121 |
+
:return: S3 URL of the uploaded object
|
| 122 |
+
:raise: ValueError if path is not a S3 URL or a local path
|
| 123 |
+
"""
|
| 124 |
+
|
| 125 |
+
local_path = jmespath.search(property_name, resource_dict)
|
| 126 |
+
|
| 127 |
+
if local_path is None:
|
| 128 |
+
# Build the root directory and upload to S3
|
| 129 |
+
local_path = parent_dir
|
| 130 |
+
|
| 131 |
+
if is_s3_url(local_path):
|
| 132 |
+
# A valid CloudFormation template will specify artifacts as S3 URLs.
|
| 133 |
+
# This check is supporting the case where your resource does not
|
| 134 |
+
# refer to local artifacts
|
| 135 |
+
# Nothing to do if property value is an S3 URL
|
| 136 |
+
LOG.debug("Property {0} of {1} is already a S3 URL"
|
| 137 |
+
.format(property_name, resource_id))
|
| 138 |
+
return local_path
|
| 139 |
+
|
| 140 |
+
local_path = make_abs_path(parent_dir, local_path)
|
| 141 |
+
|
| 142 |
+
# Or, pointing to a folder. Zip the folder and upload
|
| 143 |
+
if is_local_folder(local_path):
|
| 144 |
+
return zip_and_upload(local_path, uploader)
|
| 145 |
+
|
| 146 |
+
# Path could be pointing to a file. Upload the file
|
| 147 |
+
elif is_local_file(local_path):
|
| 148 |
+
return uploader.upload_with_dedup(local_path)
|
| 149 |
+
|
| 150 |
+
raise exceptions.InvalidLocalPathError(
|
| 151 |
+
resource_id=resource_id,
|
| 152 |
+
property_name=property_name,
|
| 153 |
+
local_path=local_path)
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
def zip_and_upload(local_path, uploader):
|
| 157 |
+
with zip_folder(local_path) as zipfile:
|
| 158 |
+
return uploader.upload_with_dedup(zipfile)
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
@contextmanager
|
| 162 |
+
def zip_folder(folder_path):
|
| 163 |
+
"""
|
| 164 |
+
Zip the entire folder and return a file to the zip. Use this inside
|
| 165 |
+
a "with" statement to cleanup the zipfile after it is used.
|
| 166 |
+
|
| 167 |
+
:param folder_path:
|
| 168 |
+
:return: Name of the zipfile
|
| 169 |
+
"""
|
| 170 |
+
|
| 171 |
+
filename = os.path.join(
|
| 172 |
+
tempfile.gettempdir(), "data-" + uuid.uuid4().hex)
|
| 173 |
+
|
| 174 |
+
zipfile_name = make_zip(filename, folder_path)
|
| 175 |
+
try:
|
| 176 |
+
yield zipfile_name
|
| 177 |
+
finally:
|
| 178 |
+
if os.path.exists(zipfile_name):
|
| 179 |
+
os.remove(zipfile_name)
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
def make_zip(filename, source_root):
|
| 183 |
+
zipfile_name = "{0}.zip".format(filename)
|
| 184 |
+
source_root = os.path.abspath(source_root)
|
| 185 |
+
with open(zipfile_name, 'wb') as f:
|
| 186 |
+
zip_file = zipfile.ZipFile(f, 'w', zipfile.ZIP_DEFLATED)
|
| 187 |
+
with contextlib.closing(zip_file) as zf:
|
| 188 |
+
for root, dirs, files in os.walk(source_root, followlinks=True):
|
| 189 |
+
for filename in files:
|
| 190 |
+
full_path = os.path.join(root, filename)
|
| 191 |
+
relative_path = os.path.relpath(
|
| 192 |
+
full_path, source_root)
|
| 193 |
+
zf.write(full_path, relative_path)
|
| 194 |
+
|
| 195 |
+
return zipfile_name
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
@contextmanager
|
| 199 |
+
def mktempfile():
|
| 200 |
+
directory = tempfile.gettempdir()
|
| 201 |
+
filename = os.path.join(directory, uuid.uuid4().hex)
|
| 202 |
+
|
| 203 |
+
try:
|
| 204 |
+
with open(filename, "w+") as handle:
|
| 205 |
+
yield handle
|
| 206 |
+
finally:
|
| 207 |
+
if os.path.exists(filename):
|
| 208 |
+
os.remove(filename)
|
| 209 |
+
|
| 210 |
+
|
| 211 |
+
def copy_to_temp_dir(filepath):
|
| 212 |
+
tmp_dir = tempfile.mkdtemp()
|
| 213 |
+
dst = os.path.join(tmp_dir, os.path.basename(filepath))
|
| 214 |
+
shutil.copy(filepath, dst)
|
| 215 |
+
return tmp_dir
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
class Resource(object):
    """
    Base class representing a CloudFormation resource that can be exported
    """

    # CloudFormation resource type this exporter handles (e.g. "AWS::Lambda::Function").
    RESOURCE_TYPE = None
    # JMESPath expression locating the property to export within the resource dict.
    PROPERTY_NAME = None
    # When False, skip packaging entirely if the property is absent/empty.
    PACKAGE_NULL_PROPERTY = True
    # Set this property to True in base class if you want the exporter to zip
    # up the file before uploading This is useful for Lambda functions.
    FORCE_ZIP = False

    def __init__(self, uploader):
        # uploader: S3 uploader used by do_export to push local artifacts.
        self.uploader = uploader

    def export(self, resource_id, resource_dict, parent_dir):
        """Package the resource's local artifact (if any) and rewrite the
        property in-place via do_export. Wraps failures in ExportFailedError."""
        if resource_dict is None:
            return

        property_value = jmespath.search(self.PROPERTY_NAME, resource_dict)

        if not property_value and not self.PACKAGE_NULL_PROPERTY:
            return

        # A dict value means the property already carries structured data
        # (e.g. an inline body or an S3 location dict), not a local path.
        if isinstance(property_value, dict):
            LOG.debug("Property {0} of {1} resource is not a URL"
                      .format(self.PROPERTY_NAME, resource_id))
            return

        # If property is a file but not a zip file, place file in temp
        # folder and send the temp folder to be zipped
        temp_dir = None
        if is_local_file(property_value) and not \
                is_zip_file(property_value) and self.FORCE_ZIP:
            temp_dir = copy_to_temp_dir(property_value)
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, temp_dir)

        try:
            self.do_export(resource_id, resource_dict, parent_dir)

        except Exception as ex:
            LOG.debug("Unable to export", exc_info=ex)
            raise exceptions.ExportFailedError(
                resource_id=resource_id,
                property_name=self.PROPERTY_NAME,
                property_value=property_value,
                ex=ex)
        finally:
            # Always clean up the temporary copy, even when do_export fails.
            if temp_dir:
                shutil.rmtree(temp_dir)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Default export action is to upload artifacts and set the property to
        S3 URL of the uploaded object
        """
        uploaded_url = upload_local_artifacts(resource_id, resource_dict,
                                              self.PROPERTY_NAME,
                                              parent_dir, self.uploader)
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url)
class ResourceWithS3UrlDict(Resource):
    """
    Represents CloudFormation resources that need the S3 URL to be specified as
    a dict like {Bucket: "", Key: "", Version: ""}
    """

    # Sub-property names (inside PROPERTY_NAME) that receive the parsed
    # S3 bucket, object key, and object version respectively.
    BUCKET_NAME_PROPERTY = None
    OBJECT_KEY_PROPERTY = None
    VERSION_PROPERTY = None

    # NOTE: a redundant __init__ that only delegated to the parent class was
    # removed; Resource.__init__(uploader) is inherited unchanged.

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        Upload to S3 and set property to a dict representing the S3 url
        of the uploaded object
        """

        artifact_s3_url = \
            upload_local_artifacts(resource_id, resource_dict,
                                   self.PROPERTY_NAME,
                                   parent_dir, self.uploader)

        # Split the s3:// URL into the {Bucket, Key, Version} shape this
        # resource type expects, using the subclass-configured key names.
        parsed_url = parse_s3_url(
            artifact_s3_url,
            bucket_name_property=self.BUCKET_NAME_PROPERTY,
            object_key_property=self.OBJECT_KEY_PROPERTY,
            version_property=self.VERSION_PROPERTY)
        set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, parsed_url)
class ServerlessFunctionResource(Resource):
    """Exports the CodeUri artifact of an AWS::Serverless::Function."""
    RESOURCE_TYPE = "AWS::Serverless::Function"
    PROPERTY_NAME = "CodeUri"
    # A bare (non-zip) local file is zipped before upload (see Resource.export).
    FORCE_ZIP = True
class ServerlessApiResource(Resource):
    """Exports the DefinitionUri artifact of an AWS::Serverless::Api."""
    RESOURCE_TYPE = "AWS::Serverless::Api"
    PROPERTY_NAME = "DefinitionUri"
    # Don't package the directory if DefinitionUri is omitted.
    # Necessary to support DefinitionBody
    PACKAGE_NULL_PROPERTY = False
class GraphQLSchemaResource(Resource):
    """Exports the DefinitionS3Location artifact of an AWS::AppSync::GraphQLSchema."""
    RESOURCE_TYPE = "AWS::AppSync::GraphQLSchema"
    PROPERTY_NAME = "DefinitionS3Location"
    # Don't package the directory if DefinitionS3Location is omitted.
    # Necessary to support Definition
    PACKAGE_NULL_PROPERTY = False
class AppSyncResolverRequestTemplateResource(Resource):
    """Exports the request mapping template artifact of an AWS::AppSync::Resolver."""
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if RequestMappingTemplateS3Location is omitted.
    # Necessary to support RequestMappingTemplate
    PACKAGE_NULL_PROPERTY = False
class AppSyncResolverResponseTemplateResource(Resource):
    """Exports the response mapping template artifact of an AWS::AppSync::Resolver."""
    RESOURCE_TYPE = "AWS::AppSync::Resolver"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if ResponseMappingTemplateS3Location is omitted.
    # Necessary to support ResponseMappingTemplate
    PACKAGE_NULL_PROPERTY = False
class AppSyncFunctionConfigurationRequestTemplateResource(Resource):
    """Exports the request mapping template of an AWS::AppSync::FunctionConfiguration."""
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "RequestMappingTemplateS3Location"
    # Don't package the directory if RequestMappingTemplateS3Location is omitted.
    # Necessary to support RequestMappingTemplate
    PACKAGE_NULL_PROPERTY = False
class AppSyncFunctionConfigurationResponseTemplateResource(Resource):
    """Exports the response mapping template of an AWS::AppSync::FunctionConfiguration."""
    RESOURCE_TYPE = "AWS::AppSync::FunctionConfiguration"
    PROPERTY_NAME = "ResponseMappingTemplateS3Location"
    # Don't package the directory if ResponseMappingTemplateS3Location is omitted.
    # Necessary to support ResponseMappingTemplate
    PACKAGE_NULL_PROPERTY = False
class LambdaFunctionResource(ResourceWithS3UrlDict):
    """Exports the Code artifact of an AWS::Lambda::Function as an S3 location dict."""
    RESOURCE_TYPE = "AWS::Lambda::Function"
    PROPERTY_NAME = "Code"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    # A bare (non-zip) local file is zipped before upload (see Resource.export).
    FORCE_ZIP = True
class ApiGatewayRestApiResource(ResourceWithS3UrlDict):
    """Exports the BodyS3Location of an AWS::ApiGateway::RestApi as an S3 location dict."""
    RESOURCE_TYPE = "AWS::ApiGateway::RestApi"
    PROPERTY_NAME = "BodyS3Location"
    # Skip packaging when BodyS3Location is omitted (e.g. an inline Body is used).
    PACKAGE_NULL_PROPERTY = False
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
class ElasticBeanstalkApplicationVersion(ResourceWithS3UrlDict):
    """Exports the SourceBundle of an AWS::ElasticBeanstalk::ApplicationVersion."""
    RESOURCE_TYPE = "AWS::ElasticBeanstalk::ApplicationVersion"
    PROPERTY_NAME = "SourceBundle"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    # SourceBundle takes no object-version key, so none is emitted.
    VERSION_PROPERTY = None
class LambdaLayerVersionResource(ResourceWithS3UrlDict):
    """Exports the Content artifact of an AWS::Lambda::LayerVersion as an S3 location dict."""
    RESOURCE_TYPE = "AWS::Lambda::LayerVersion"
    PROPERTY_NAME = "Content"
    BUCKET_NAME_PROPERTY = "S3Bucket"
    OBJECT_KEY_PROPERTY = "S3Key"
    VERSION_PROPERTY = "S3ObjectVersion"
    # A bare (non-zip) local file is zipped before upload (see Resource.export).
    FORCE_ZIP = True
class ServerlessLayerVersionResource(Resource):
    """Exports the ContentUri artifact of an AWS::Serverless::LayerVersion."""
    RESOURCE_TYPE = "AWS::Serverless::LayerVersion"
    PROPERTY_NAME = "ContentUri"
    # A bare (non-zip) local file is zipped before upload (see Resource.export).
    FORCE_ZIP = True
class ServerlessRepoApplicationReadme(Resource):
    """Exports the ReadmeUrl artifact of an AWS::ServerlessRepo::Application metadata entry."""
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "ReadmeUrl"
    # Skip packaging when ReadmeUrl is omitted.
    PACKAGE_NULL_PROPERTY = False
class ServerlessRepoApplicationLicense(Resource):
    """Exports the LicenseUrl artifact of an AWS::ServerlessRepo::Application metadata entry."""
    RESOURCE_TYPE = "AWS::ServerlessRepo::Application"
    PROPERTY_NAME = "LicenseUrl"
    # Skip packaging when LicenseUrl is omitted.
    PACKAGE_NULL_PROPERTY = False
class StepFunctionsStateMachineDefinitionResource(ResourceWithS3UrlDict):
    """Exports the DefinitionS3Location of an AWS::StepFunctions::StateMachine."""
    RESOURCE_TYPE = "AWS::StepFunctions::StateMachine"
    PROPERTY_NAME = "DefinitionS3Location"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
    # Skip packaging when DefinitionS3Location is omitted (inline definitions).
    PACKAGE_NULL_PROPERTY = False
class ServerlessStateMachineDefinitionResource(ResourceWithS3UrlDict):
    """Exports the DefinitionUri of an AWS::Serverless::StateMachine."""
    RESOURCE_TYPE = "AWS::Serverless::StateMachine"
    PROPERTY_NAME = "DefinitionUri"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "Version"
    # Skip packaging when DefinitionUri is omitted (inline definitions).
    PACKAGE_NULL_PROPERTY = False
class CloudFormationStackResource(Resource):
    """
    Represents CloudFormation::Stack resource that can refer to a nested
    stack template via TemplateURL property.
    """
    RESOURCE_TYPE = "AWS::CloudFormation::Stack"
    PROPERTY_NAME = "TemplateURL"

    def __init__(self, uploader):
        # NOTE(review): delegates straight to Resource.__init__; kept as-is.
        super(CloudFormationStackResource, self).__init__(uploader)

    def do_export(self, resource_id, resource_dict, parent_dir):
        """
        If the nested stack template is valid, this method will
        export on the nested template, upload the exported template to S3
        and set property to URL of the uploaded S3 template
        """

        template_path = resource_dict.get(self.PROPERTY_NAME, None)

        # Already-remote templates (S3 or HTTP(S) URLs) require no packaging.
        if template_path is None or is_s3_url(template_path) or \
                template_path.startswith("http://") or \
                template_path.startswith("https://"):
            # Nothing to do
            return

        abs_template_path = make_abs_path(parent_dir, template_path)
        if not is_local_file(abs_template_path):
            raise exceptions.InvalidTemplateUrlParameterError(
                property_name=self.PROPERTY_NAME,
                resource_id=resource_id,
                template_path=abs_template_path)

        # Recursively package the nested template's own artifacts first.
        exported_template_dict = \
            Template(template_path, parent_dir, self.uploader).export()

        exported_template_str = yaml_dump(exported_template_dict)

        with mktempfile() as temporary_file:
            temporary_file.write(exported_template_str)
            temporary_file.flush()

            url = self.uploader.upload_with_dedup(
                temporary_file.name, "template")

            # TemplateUrl property requires S3 URL to be in path-style format
            parts = parse_s3_url(url, version_property="Version")
            s3_path_url = self.uploader.to_path_style_s3_url(
                parts["Key"], parts.get("Version", None))
            set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, s3_path_url)
class ServerlessApplicationResource(CloudFormationStackResource):
    """
    Represents Serverless::Application resource that can refer to a nested
    app template via Location property.
    """
    # Inherits the nested-template export behavior of CloudFormationStackResource.
    RESOURCE_TYPE = "AWS::Serverless::Application"
    PROPERTY_NAME = "Location"
class GlueJobCommandScriptLocationResource(Resource):
    """
    Represents Glue::Job resource.
    """
    RESOURCE_TYPE = "AWS::Glue::Job"
    # Note the PROPERTY_NAME includes a '.' implying it's nested.
    # Resource.export resolves it with a JMESPath search, so the dotted
    # path reaches Command.ScriptLocation inside the Properties dict.
    PROPERTY_NAME = "Command.ScriptLocation"
class CodeCommitRepositoryS3Resource(ResourceWithS3UrlDict):
    """
    Represents CodeCommit::Repository resource.
    """
    RESOURCE_TYPE = "AWS::CodeCommit::Repository"
    # Nested JMESPath: the S3 location dict lives under Code.S3.
    PROPERTY_NAME = "Code.S3"
    BUCKET_NAME_PROPERTY = "Bucket"
    OBJECT_KEY_PROPERTY = "Key"
    VERSION_PROPERTY = "ObjectVersion"
    # Don't package the directory if S3 is omitted.
    PACKAGE_NULL_PROPERTY = False
    # A bare (non-zip) local file is zipped before upload (see Resource.export).
    FORCE_ZIP = True
# Exporter classes matched (by RESOURCE_TYPE) against each entry in a
# template's Resources section; see Template.export_resources.
RESOURCES_EXPORT_LIST = [
    ServerlessFunctionResource,
    ServerlessApiResource,
    GraphQLSchemaResource,
    AppSyncResolverRequestTemplateResource,
    AppSyncResolverResponseTemplateResource,
    AppSyncFunctionConfigurationRequestTemplateResource,
    AppSyncFunctionConfigurationResponseTemplateResource,
    ApiGatewayRestApiResource,
    LambdaFunctionResource,
    ElasticBeanstalkApplicationVersion,
    CloudFormationStackResource,
    ServerlessApplicationResource,
    ServerlessLayerVersionResource,
    LambdaLayerVersionResource,
    GlueJobCommandScriptLocationResource,
    StepFunctionsStateMachineDefinitionResource,
    ServerlessStateMachineDefinitionResource,
    CodeCommitRepositoryS3Resource
]
# Exporter classes matched (by RESOURCE_TYPE) against keys of a template's
# Metadata section; see Template.export_metadata.
METADATA_EXPORT_LIST = [
    ServerlessRepoApplicationReadme,
    ServerlessRepoApplicationLicense
]
def include_transform_export_handler(template_dict, uploader, parent_dir):
    """
    Upload the local file referenced by an AWS::Include transform and
    rewrite its Parameters.Location to the uploaded S3 URI.

    Returns the (possibly modified) transform dict unchanged when it is not
    an AWS::Include, or when Location is empty/non-string/already an S3 URI.
    Raises InvalidLocalPathError when Location points at a missing file.
    """
    # Only AWS::Include transforms carry an uploadable Location.
    if template_dict.get("Name", None) != "AWS::Include":
        return template_dict

    location = template_dict.get("Parameters", {}).get("Location", None)
    nothing_to_upload = (
        not location
        or not is_path_value_valid(location)
        or is_s3_url(location)
    )
    if nothing_to_upload:
        # Location is empty, not a string, or already an S3 URI.
        return template_dict

    # At this point `location` is a string holding a local path.
    resolved_path = os.path.join(parent_dir, location)
    if not is_local_file(resolved_path):
        raise exceptions.InvalidLocalPathError(
            resource_id="AWS::Include",
            property_name="Location",
            local_path=resolved_path)

    template_dict["Parameters"]["Location"] = \
        uploader.upload_with_dedup(resolved_path)
    return template_dict
# Handlers for template-wide keys that may reference local artifacts
# regardless of resource type; consumed by Template.export_global_artifacts.
GLOBAL_EXPORT_DICT = {
    "Fn::Transform": include_transform_export_handler
}
class Template(object):
    """
    Class to export a CloudFormation template
    """

    def __init__(self, template_path, parent_dir, uploader,
                 resources_to_export=RESOURCES_EXPORT_LIST,
                 metadata_to_export=METADATA_EXPORT_LIST):
        """
        Reads the template and makes it ready for export.

        :param template_path: Path (possibly relative to parent_dir) of the
            template file to read.
        :param parent_dir: Absolute path of the folder relative paths in the
            template are resolved against.
        :param uploader: S3 uploader passed to each exporter.
        :param resources_to_export: Exporter classes for the Resources section.
        :param metadata_to_export: Exporter classes for the Metadata section.
        :raises ValueError: if parent_dir is not an absolute folder path.
        """

        if not (is_local_folder(parent_dir) and os.path.isabs(parent_dir)):
            raise ValueError("parent_dir parameter must be "
                             "an absolute path to a folder {0}"
                             .format(parent_dir))

        abs_template_path = make_abs_path(parent_dir, template_path)
        template_dir = os.path.dirname(abs_template_path)

        with open(abs_template_path, "r") as handle:
            template_str = handle.read()

        self.template_dict = yaml_parse(template_str)
        self.template_dir = template_dir
        self.resources_to_export = resources_to_export
        self.metadata_to_export = metadata_to_export
        self.uploader = uploader

    def export_global_artifacts(self, template_dict):
        """
        Template params such as AWS::Include transforms are not specific to
        any resource type but contain artifacts that should be exported,
        here we iterate through the template dict and export params with a
        handler defined in GLOBAL_EXPORT_DICT
        """
        for key, val in template_dict.items():
            if key in GLOBAL_EXPORT_DICT:
                template_dict[key] = GLOBAL_EXPORT_DICT[key](val, self.uploader, self.template_dir)
            elif isinstance(val, dict):
                # Recurse into nested mappings to find handlers at any depth.
                self.export_global_artifacts(val)
            elif isinstance(val, list):
                for item in val:
                    if isinstance(item, dict):
                        self.export_global_artifacts(item)
        return template_dict

    def export_metadata(self, template_dict):
        """
        Exports the local artifacts referenced by the metadata section in
        the given template to an s3 bucket.

        :return: The template with references to artifacts that have been
        exported to s3.
        """
        if "Metadata" not in template_dict:
            return template_dict

        for metadata_type, metadata_dict in template_dict["Metadata"].items():
            for exporter_class in self.metadata_to_export:
                if exporter_class.RESOURCE_TYPE != metadata_type:
                    continue

                exporter = exporter_class(self.uploader)
                exporter.export(metadata_type, metadata_dict, self.template_dir)

        return template_dict

    def export(self):
        """
        Exports the local artifacts referenced by the given template to an
        s3 bucket.

        :return: The template with references to artifacts that have been
        exported to s3.
        """
        self.template_dict = self.export_metadata(self.template_dict)

        if "Resources" not in self.template_dict:
            return self.template_dict

        self.template_dict = self.export_global_artifacts(self.template_dict)

        self.export_resources(self.template_dict["Resources"])

        return self.template_dict

    def export_resources(self, resource_dict):
        """
        Runs the matching exporter over each resource's Properties, in place.
        Fn::ForEach entries are recursed into via their third element.
        """
        for resource_id, resource in resource_dict.items():

            if resource_id.startswith("Fn::ForEach::"):
                if not isinstance(resource, list) or len(resource) != 3:
                    raise exceptions.InvalidForEachIntrinsicFunctionError(resource_id=resource_id)
                self.export_resources(resource[2])
                continue

            resource_type = resource.get("Type", None)
            # Bug fix: the original rebound the `resource_dict` parameter
            # here, shadowing the very mapping being iterated; use a
            # distinct local name for the resource's Properties instead.
            resource_properties = resource.get("Properties", None)

            for exporter_class in self.resources_to_export:
                if exporter_class.RESOURCE_TYPE != resource_type:
                    continue

                # Export code resources
                exporter = exporter_class(self.uploader)
                exporter.export(resource_id, resource_properties, self.template_dir)
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/deploy.py
ADDED
|
@@ -0,0 +1,416 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import os
|
| 15 |
+
import sys
|
| 16 |
+
import logging
|
| 17 |
+
|
| 18 |
+
from botocore.client import Config
|
| 19 |
+
|
| 20 |
+
from awscli.customizations.cloudformation import exceptions
|
| 21 |
+
from awscli.customizations.cloudformation.deployer import Deployer
|
| 22 |
+
from awscli.customizations.s3uploader import S3Uploader
|
| 23 |
+
from awscli.customizations.cloudformation.yamlhelper import yaml_parse
|
| 24 |
+
|
| 25 |
+
from awscli.customizations.commands import BasicCommand
|
| 26 |
+
from awscli.compat import get_stdout_text_writer
|
| 27 |
+
from awscli.utils import write_exception
|
| 28 |
+
|
| 29 |
+
LOG = logging.getLogger(__name__)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class DeployCommand(BasicCommand):
|
| 33 |
+
|
| 34 |
+
MSG_NO_EXECUTE_CHANGESET = \
|
| 35 |
+
("Changeset created successfully. Run the following command to "
|
| 36 |
+
"review changes:"
|
| 37 |
+
"\n"
|
| 38 |
+
"aws cloudformation describe-change-set --change-set-name "
|
| 39 |
+
"{changeset_id}"
|
| 40 |
+
"\n")
|
| 41 |
+
|
| 42 |
+
MSG_EXECUTE_SUCCESS = "Successfully created/updated stack - {stack_name}\n"
|
| 43 |
+
|
| 44 |
+
PARAMETER_OVERRIDE_CMD = "parameter-overrides"
|
| 45 |
+
TAGS_CMD = "tags"
|
| 46 |
+
|
| 47 |
+
NAME = 'deploy'
|
| 48 |
+
DESCRIPTION = BasicCommand.FROM_FILE("cloudformation",
|
| 49 |
+
"_deploy_description.rst")
|
| 50 |
+
|
| 51 |
+
ARG_TABLE = [
|
| 52 |
+
{
|
| 53 |
+
'name': 'template-file',
|
| 54 |
+
'required': True,
|
| 55 |
+
'help_text': (
|
| 56 |
+
'The path where your AWS CloudFormation'
|
| 57 |
+
' template is located.'
|
| 58 |
+
)
|
| 59 |
+
},
|
| 60 |
+
{
|
| 61 |
+
'name': 'stack-name',
|
| 62 |
+
'action': 'store',
|
| 63 |
+
'required': True,
|
| 64 |
+
'help_text': (
|
| 65 |
+
'The name of the AWS CloudFormation stack you\'re deploying to.'
|
| 66 |
+
' If you specify an existing stack, the command updates the'
|
| 67 |
+
' stack. If you specify a new stack, the command creates it.'
|
| 68 |
+
)
|
| 69 |
+
},
|
| 70 |
+
{
|
| 71 |
+
'name': 's3-bucket',
|
| 72 |
+
'required': False,
|
| 73 |
+
'help_text': (
|
| 74 |
+
'The name of the S3 bucket where this command uploads your '
|
| 75 |
+
'CloudFormation template. This is required the deployments of '
|
| 76 |
+
'templates sized greater than 51,200 bytes'
|
| 77 |
+
)
|
| 78 |
+
},
|
| 79 |
+
{
|
| 80 |
+
"name": "force-upload",
|
| 81 |
+
"action": "store_true",
|
| 82 |
+
"help_text": (
|
| 83 |
+
'Indicates whether to override existing files in the S3 bucket.'
|
| 84 |
+
' Specify this flag to upload artifacts even if they '
|
| 85 |
+
' match existing artifacts in the S3 bucket.'
|
| 86 |
+
)
|
| 87 |
+
},
|
| 88 |
+
{
|
| 89 |
+
'name': 's3-prefix',
|
| 90 |
+
'help_text': (
|
| 91 |
+
'A prefix name that the command adds to the'
|
| 92 |
+
' artifacts\' name when it uploads them to the S3 bucket.'
|
| 93 |
+
' The prefix name is a path name (folder name) for'
|
| 94 |
+
' the S3 bucket.'
|
| 95 |
+
)
|
| 96 |
+
},
|
| 97 |
+
|
| 98 |
+
{
|
| 99 |
+
'name': 'kms-key-id',
|
| 100 |
+
'help_text': (
|
| 101 |
+
'The ID of an AWS KMS key that the command uses'
|
| 102 |
+
' to encrypt artifacts that are at rest in the S3 bucket.'
|
| 103 |
+
)
|
| 104 |
+
},
|
| 105 |
+
{
|
| 106 |
+
'name': PARAMETER_OVERRIDE_CMD,
|
| 107 |
+
'action': 'store',
|
| 108 |
+
'required': False,
|
| 109 |
+
'schema': {
|
| 110 |
+
'type': 'array',
|
| 111 |
+
'items': {
|
| 112 |
+
'type': 'string'
|
| 113 |
+
}
|
| 114 |
+
},
|
| 115 |
+
'default': [],
|
| 116 |
+
'help_text': (
|
| 117 |
+
'A list of parameter structures that specify input parameters'
|
| 118 |
+
' for your stack template. If you\'re updating a stack and you'
|
| 119 |
+
' don\'t specify a parameter, the command uses the stack\'s'
|
| 120 |
+
' existing value. For new stacks, you must specify'
|
| 121 |
+
' parameters that don\'t have a default value.'
|
| 122 |
+
' Syntax: ParameterKey1=ParameterValue1'
|
| 123 |
+
' ParameterKey2=ParameterValue2 ...'
|
| 124 |
+
)
|
| 125 |
+
},
|
| 126 |
+
{
|
| 127 |
+
'name': 'capabilities',
|
| 128 |
+
'action': 'store',
|
| 129 |
+
'required': False,
|
| 130 |
+
'schema': {
|
| 131 |
+
'type': 'array',
|
| 132 |
+
'items': {
|
| 133 |
+
'type': 'string',
|
| 134 |
+
'enum': [
|
| 135 |
+
'CAPABILITY_IAM',
|
| 136 |
+
'CAPABILITY_NAMED_IAM'
|
| 137 |
+
]
|
| 138 |
+
}
|
| 139 |
+
},
|
| 140 |
+
'default': [],
|
| 141 |
+
'help_text': (
|
| 142 |
+
'A list of capabilities that you must specify before AWS'
|
| 143 |
+
' Cloudformation can create certain stacks. Some stack'
|
| 144 |
+
' templates might include resources that can affect'
|
| 145 |
+
' permissions in your AWS account, for example, by creating'
|
| 146 |
+
' new AWS Identity and Access Management (IAM) users. For'
|
| 147 |
+
' those stacks, you must explicitly acknowledge their'
|
| 148 |
+
' capabilities by specifying this parameter. '
|
| 149 |
+
' The only valid values are CAPABILITY_IAM and'
|
| 150 |
+
' CAPABILITY_NAMED_IAM. If you have IAM resources, you can'
|
| 151 |
+
' specify either capability. If you have IAM resources with'
|
| 152 |
+
' custom names, you must specify CAPABILITY_NAMED_IAM. If you'
|
| 153 |
+
' don\'t specify this parameter, this action returns an'
|
| 154 |
+
' InsufficientCapabilities error.'
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
},
|
| 158 |
+
{
|
| 159 |
+
'name': 'no-execute-changeset',
|
| 160 |
+
'action': 'store_false',
|
| 161 |
+
'dest': 'execute_changeset',
|
| 162 |
+
'required': False,
|
| 163 |
+
'help_text': (
|
| 164 |
+
'Indicates whether to execute the change set. Specify this'
|
| 165 |
+
' flag if you want to view your stack changes before'
|
| 166 |
+
' executing the change set. The command creates an'
|
| 167 |
+
' AWS CloudFormation change set and then exits without'
|
| 168 |
+
' executing the change set. After you view the change set,'
|
| 169 |
+
' execute it to implement your changes.'
|
| 170 |
+
)
|
| 171 |
+
},
|
| 172 |
+
{
|
| 173 |
+
'name': 'disable-rollback',
|
| 174 |
+
'required': False,
|
| 175 |
+
'action': 'store_true',
|
| 176 |
+
'group_name': 'disable-rollback',
|
| 177 |
+
'dest': 'disable_rollback',
|
| 178 |
+
'default': False,
|
| 179 |
+
'help_text': (
|
| 180 |
+
'Preserve the state of previously provisioned resources when '
|
| 181 |
+
'the execute-change-set operation fails.'
|
| 182 |
+
)
|
| 183 |
+
},
|
| 184 |
+
{
|
| 185 |
+
'name': 'no-disable-rollback',
|
| 186 |
+
'required': False,
|
| 187 |
+
'action': 'store_false',
|
| 188 |
+
'group_name': 'disable-rollback',
|
| 189 |
+
'dest': 'disable_rollback',
|
| 190 |
+
'default': True,
|
| 191 |
+
'help_text': (
|
| 192 |
+
'Roll back all resource changes when the execute-change-set '
|
| 193 |
+
'operation fails.'
|
| 194 |
+
)
|
| 195 |
+
},
|
| 196 |
+
{
|
| 197 |
+
'name': 'role-arn',
|
| 198 |
+
'required': False,
|
| 199 |
+
'help_text': (
|
| 200 |
+
'The Amazon Resource Name (ARN) of an AWS Identity and Access '
|
| 201 |
+
'Management (IAM) role that AWS CloudFormation assumes when '
|
| 202 |
+
'executing the change set.'
|
| 203 |
+
)
|
| 204 |
+
},
|
| 205 |
+
{
|
| 206 |
+
'name': 'notification-arns',
|
| 207 |
+
'required': False,
|
| 208 |
+
'schema': {
|
| 209 |
+
'type': 'array',
|
| 210 |
+
'items': {
|
| 211 |
+
'type': 'string'
|
| 212 |
+
}
|
| 213 |
+
},
|
| 214 |
+
'help_text': (
|
| 215 |
+
'Amazon Simple Notification Service topic Amazon Resource Names'
|
| 216 |
+
' (ARNs) that AWS CloudFormation associates with the stack.'
|
| 217 |
+
)
|
| 218 |
+
},
|
| 219 |
+
{
|
| 220 |
+
'name': 'fail-on-empty-changeset',
|
| 221 |
+
'required': False,
|
| 222 |
+
'action': 'store_true',
|
| 223 |
+
'group_name': 'fail-on-empty-changeset',
|
| 224 |
+
'dest': 'fail_on_empty_changeset',
|
| 225 |
+
'default': True,
|
| 226 |
+
'help_text': (
|
| 227 |
+
'Specify if the CLI should return a non-zero exit code '
|
| 228 |
+
'when there are no changes to be made to the stack. By '
|
| 229 |
+
'default, a non-zero exit code is returned, and this is '
|
| 230 |
+
'the same behavior that occurs when '
|
| 231 |
+
'`--fail-on-empty-changeset` is specified. If '
|
| 232 |
+
'`--no-fail-on-empty-changeset` is specified, then the '
|
| 233 |
+
'CLI will return a zero exit code.'
|
| 234 |
+
)
|
| 235 |
+
},
|
| 236 |
+
{
|
| 237 |
+
'name': 'no-fail-on-empty-changeset',
|
| 238 |
+
'required': False,
|
| 239 |
+
'action': 'store_false',
|
| 240 |
+
'group_name': 'fail-on-empty-changeset',
|
| 241 |
+
'dest': 'fail_on_empty_changeset',
|
| 242 |
+
'default': True,
|
| 243 |
+
'help_text': (
|
| 244 |
+
'Causes the CLI to return an exit code of 0 if there are no '
|
| 245 |
+
'changes to be made to the stack.'
|
| 246 |
+
)
|
| 247 |
+
},
|
| 248 |
+
{
|
| 249 |
+
'name': TAGS_CMD,
|
| 250 |
+
'action': 'store',
|
| 251 |
+
'required': False,
|
| 252 |
+
'schema': {
|
| 253 |
+
'type': 'array',
|
| 254 |
+
'items': {
|
| 255 |
+
'type': 'string'
|
| 256 |
+
}
|
| 257 |
+
},
|
| 258 |
+
'default': [],
|
| 259 |
+
'help_text': (
|
| 260 |
+
'A list of tags to associate with the stack that is created'
|
| 261 |
+
' or updated. AWS CloudFormation also propagates these tags'
|
| 262 |
+
' to resources in the stack if the resource supports it.'
|
| 263 |
+
' Syntax: TagKey1=TagValue1 TagKey2=TagValue2 ...'
|
| 264 |
+
)
|
| 265 |
+
}
|
| 266 |
+
]
|
| 267 |
+
|
| 268 |
+
def _run_main(self, parsed_args, parsed_globals):
|
| 269 |
+
cloudformation_client = \
|
| 270 |
+
self._session.create_client(
|
| 271 |
+
'cloudformation', region_name=parsed_globals.region,
|
| 272 |
+
endpoint_url=parsed_globals.endpoint_url,
|
| 273 |
+
verify=parsed_globals.verify_ssl)
|
| 274 |
+
|
| 275 |
+
template_path = parsed_args.template_file
|
| 276 |
+
if not os.path.isfile(template_path):
|
| 277 |
+
raise exceptions.InvalidTemplatePathError(
|
| 278 |
+
template_path=template_path)
|
| 279 |
+
|
| 280 |
+
# Parse parameters
|
| 281 |
+
with open(template_path, "r") as handle:
|
| 282 |
+
template_str = handle.read()
|
| 283 |
+
|
| 284 |
+
stack_name = parsed_args.stack_name
|
| 285 |
+
parameter_overrides = self.parse_key_value_arg(
|
| 286 |
+
parsed_args.parameter_overrides,
|
| 287 |
+
self.PARAMETER_OVERRIDE_CMD)
|
| 288 |
+
|
| 289 |
+
tags_dict = self.parse_key_value_arg(parsed_args.tags, self.TAGS_CMD)
|
| 290 |
+
tags = [{"Key": key, "Value": value}
|
| 291 |
+
for key, value in tags_dict.items()]
|
| 292 |
+
|
| 293 |
+
template_dict = yaml_parse(template_str)
|
| 294 |
+
|
| 295 |
+
parameters = self.merge_parameters(template_dict, parameter_overrides)
|
| 296 |
+
|
| 297 |
+
template_size = os.path.getsize(parsed_args.template_file)
|
| 298 |
+
if template_size > 51200 and not parsed_args.s3_bucket:
|
| 299 |
+
raise exceptions.DeployBucketRequiredError()
|
| 300 |
+
|
| 301 |
+
bucket = parsed_args.s3_bucket
|
| 302 |
+
if bucket:
|
| 303 |
+
s3_client = self._session.create_client(
|
| 304 |
+
"s3",
|
| 305 |
+
config=Config(signature_version='s3v4'),
|
| 306 |
+
region_name=parsed_globals.region,
|
| 307 |
+
verify=parsed_globals.verify_ssl)
|
| 308 |
+
|
| 309 |
+
s3_uploader = S3Uploader(s3_client,
|
| 310 |
+
bucket,
|
| 311 |
+
parsed_args.s3_prefix,
|
| 312 |
+
parsed_args.kms_key_id,
|
| 313 |
+
parsed_args.force_upload)
|
| 314 |
+
else:
|
| 315 |
+
s3_uploader = None
|
| 316 |
+
|
| 317 |
+
deployer = Deployer(cloudformation_client)
|
| 318 |
+
return self.deploy(deployer, stack_name, template_str,
|
| 319 |
+
parameters, parsed_args.capabilities,
|
| 320 |
+
parsed_args.execute_changeset, parsed_args.role_arn,
|
| 321 |
+
parsed_args.notification_arns, s3_uploader,
|
| 322 |
+
tags, parsed_args.fail_on_empty_changeset,
|
| 323 |
+
parsed_args.disable_rollback)
|
| 324 |
+
|
| 325 |
+
def deploy(self, deployer, stack_name, template_str,
           parameters, capabilities, execute_changeset, role_arn,
           notification_arns, s3_uploader, tags,
           fail_on_empty_changeset=True, disable_rollback=False):
    """Create a changeset for the stack and optionally execute it.

    Returns 0 on success. When the changeset turns out to be empty and
    ``fail_on_empty_changeset`` is False, the condition is reported on
    stdout and 0 is returned instead of raising.
    """
    try:
        changeset_result = deployer.create_and_wait_for_changeset(
            stack_name=stack_name,
            cfn_template=template_str,
            parameter_values=parameters,
            capabilities=capabilities,
            role_arn=role_arn,
            notification_arns=notification_arns,
            s3_uploader=s3_uploader,
            tags=tags
        )
    except exceptions.ChangeEmptyError as err:
        if fail_on_empty_changeset:
            raise
        # Empty changeset is tolerated: report it and exit successfully.
        write_exception(err, outfile=get_stdout_text_writer())
        return 0

    if not execute_changeset:
        # Leave the changeset for the user to review and execute later.
        sys.stdout.write(self.MSG_NO_EXECUTE_CHANGESET.format(
            changeset_id=changeset_result.changeset_id))
    else:
        deployer.execute_changeset(changeset_result.changeset_id, stack_name,
                                   disable_rollback)
        deployer.wait_for_execute(stack_name, changeset_result.changeset_type)
        sys.stdout.write(self.MSG_EXECUTE_SUCCESS.format(
            stack_name=stack_name))

    sys.stdout.flush()
    return 0
| 358 |
+
|
| 359 |
+
def merge_parameters(self, template_dict, parameter_overrides):
    """
    CloudFormation CreateChangeset requires a value for every parameter
    from the template, either specifying a new value or use previous value.
    For convenience, this method will accept new parameter values and
    generates a dict of all parameters in a format that ChangeSet API
    will accept

    :param parameter_overrides: dict of ParameterKey -> new value
    :return: list of parameter dicts in CreateChangeSet format
    """
    merged = []

    # Templates without a mapping "Parameters" section contribute nothing.
    template_params = template_dict.get("Parameters", None)
    if not isinstance(template_params, dict):
        return merged

    for name in template_params:
        entry = {"ParameterKey": name}
        if name in parameter_overrides:
            entry["ParameterValue"] = parameter_overrides[name]
        else:
            # No override supplied: tell CloudFormation to keep the
            # value from the currently-deployed stack.
            entry["UsePreviousValue"] = True
        merged.append(entry)

    return merged
| 389 |
+
|
| 390 |
+
def parse_key_value_arg(self, arg_value, argname):
    """
    Converts arguments that are passed as list of "Key=Value" strings
    into a real dictionary.

    :param arg_value list: Array of strings, where each string is of
        form Key=Value
    :param argname string: Name of the argument that contains the value
    :return dict: Dictionary representing the key/value pairs
    :raises InvalidKeyValuePairArgumentError: if an entry has no '='
    """
    result = {}
    for data in arg_value:

        # Split at first '=' from left so values may themselves
        # contain '=' characters (e.g. "Key=a=b" -> {"Key": "a=b"}).
        key_value_pair = data.split("=", 1)

        if len(key_value_pair) != 2:
            # Report the original argument string, not the split list,
            # so the error message shows exactly what the user typed
            # instead of a list repr like "['foo']".
            raise exceptions.InvalidKeyValuePairArgumentError(
                argname=argname,
                value=data)

        result[key_value_pair[0]] = key_value_pair[1]

    return result
| 414 |
+
|
| 415 |
+
|
| 416 |
+
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/deployer.py
ADDED
|
@@ -0,0 +1,232 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import sys
|
| 15 |
+
import time
|
| 16 |
+
import logging
|
| 17 |
+
import botocore
|
| 18 |
+
import collections
|
| 19 |
+
|
| 20 |
+
from awscli.customizations.cloudformation import exceptions
|
| 21 |
+
from awscli.customizations.cloudformation.artifact_exporter import mktempfile, parse_s3_url
|
| 22 |
+
|
| 23 |
+
from datetime import datetime
|
| 24 |
+
|
| 25 |
+
LOG = logging.getLogger(__name__)

# Result of a create-changeset call: the changeset's ID/ARN and whether it
# was created with ChangeSetType "CREATE" or "UPDATE".
ChangeSetResult = collections.namedtuple(
    "ChangeSetResult", ["changeset_id", "changeset_type"])
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class Deployer(object):
    """Drives CloudFormation deployments through changesets.

    Wraps a botocore CloudFormation client with the
    create / wait / execute changeset workflow used by
    ``aws cloudformation deploy``.
    """

    def __init__(self, cloudformation_client,
                 changeset_prefix="awscli-cloudformation-package-deploy-"):
        """
        :param cloudformation_client: botocore CloudFormation client
        :param changeset_prefix: Prefix for names of changesets created here
        """
        self._client = cloudformation_client
        self.changeset_prefix = changeset_prefix

    def has_stack(self, stack_name):
        """
        Checks if a CloudFormation stack with given name exists

        :param stack_name: Name or ID of the stack
        :return: True if stack exists. False otherwise
        """
        try:
            resp = self._client.describe_stacks(StackName=stack_name)
            if len(resp["Stacks"]) != 1:
                return False

            # When you run CreateChangeSet on a stack that does not exist,
            # CloudFormation will create a stack and set its status to
            # REVIEW_IN_PROGRESS. However this stack cannot be manipulated
            # by "update" commands. Under these circumstances, we treat the
            # stack as non-existent and call CreateChangeSet with
            # ChangeSetType set to CREATE and not UPDATE.
            stack = resp["Stacks"][0]
            return stack["StackStatus"] != "REVIEW_IN_PROGRESS"

        except botocore.exceptions.ClientError as e:
            # If a stack does not exist, describe_stacks will throw an
            # exception. Unfortunately we don't have a better way than parsing
            # the exception msg to understand the nature of this exception.
            msg = str(e)

            if "Stack with id {0} does not exist".format(stack_name) in msg:
                LOG.debug("Stack with id {0} does not exist".format(
                    stack_name))
                return False
            else:
                # We don't know anything about this exception. Don't handle;
                # bare raise preserves the original traceback.
                LOG.debug("Unable to get stack details.", exc_info=e)
                raise

    def create_changeset(self, stack_name, cfn_template,
                         parameter_values, capabilities, role_arn,
                         notification_arns, s3_uploader, tags):
        """
        Call CloudFormation to create a changeset.

        :param stack_name: Name or ID of stack
        :param cfn_template: CloudFormation template string
        :param parameter_values: Template parameters object
        :param capabilities: Array of capabilities passed to CloudFormation
        :param role_arn: Optional service role ARN, or None
        :param notification_arns: Optional SNS topic ARNs, or None
        :param s3_uploader: Optional S3Uploader used for large templates
        :param tags: Array of tags passed to CloudFormation
        :return: ChangeSetResult with the changeset ID and type
        """

        now = datetime.utcnow().isoformat()
        description = "Created by AWS CLI at {0} UTC".format(now)

        # Each changeset will get a unique name based on time
        changeset_name = self.changeset_prefix + str(int(time.time()))

        if not self.has_stack(stack_name):
            changeset_type = "CREATE"
            # When creating a new stack, UsePreviousValue=True is invalid.
            # For such parameters, users should either override with new value,
            # or set a Default value in template to successfully create a stack.
            parameter_values = [x for x in parameter_values
                                if not x.get("UsePreviousValue", False)]
        else:
            changeset_type = "UPDATE"
            # UsePreviousValue not valid if parameter is new: drop any
            # "use previous" entries for parameters the deployed stack
            # does not already have.
            summary = self._client.get_template_summary(StackName=stack_name)
            existing_parameters = [parameter['ParameterKey'] for parameter in
                                   summary['Parameters']]
            parameter_values = [
                x for x in parameter_values
                if not (x.get("UsePreviousValue", False) and
                        x["ParameterKey"] not in existing_parameters)]

        kwargs = {
            'ChangeSetName': changeset_name,
            'StackName': stack_name,
            'TemplateBody': cfn_template,
            'ChangeSetType': changeset_type,
            'Parameters': parameter_values,
            'Capabilities': capabilities,
            'Description': description,
            'Tags': tags,
        }

        # If an S3 uploader is available, use TemplateURL to deploy rather than
        # TemplateBody. This is required for large templates.
        if s3_uploader:
            with mktempfile() as temporary_file:
                temporary_file.write(kwargs.pop('TemplateBody'))
                temporary_file.flush()
                url = s3_uploader.upload_with_dedup(
                    temporary_file.name, "template")
                # TemplateUrl property requires S3 URL to be in path-style format
                parts = parse_s3_url(url, version_property="Version")
                kwargs['TemplateURL'] = s3_uploader.to_path_style_s3_url(
                    parts["Key"], parts.get("Version", None))

        # don't set these arguments if not specified to use existing values
        if role_arn is not None:
            kwargs['RoleARN'] = role_arn
        if notification_arns is not None:
            kwargs['NotificationARNs'] = notification_arns
        try:
            resp = self._client.create_change_set(**kwargs)
            return ChangeSetResult(resp["Id"], changeset_type)
        except Exception as ex:
            LOG.debug("Unable to create changeset", exc_info=ex)
            raise

    def wait_for_changeset(self, changeset_id, stack_name):
        """
        Waits until the changeset creation completes

        :param changeset_id: ID or name of the changeset
        :param stack_name: Stack name
        :raises ChangeEmptyError: if the changeset contains no changes
        :raises RuntimeError: if changeset creation fails otherwise
        """
        sys.stdout.write("\nWaiting for changeset to be created..\n")
        sys.stdout.flush()

        # Wait for changeset to be created
        waiter = self._client.get_waiter("change_set_create_complete")
        # Poll every 5 seconds. Changeset creation should be fast
        waiter_config = {'Delay': 5}
        try:
            waiter.wait(ChangeSetName=changeset_id, StackName=stack_name,
                        WaiterConfig=waiter_config)
        except botocore.exceptions.WaiterError as ex:
            LOG.debug("Create changeset waiter exception", exc_info=ex)

            resp = ex.last_response
            status = resp["Status"]
            reason = resp["StatusReason"]

            # BUGFIX: the 'or' clause must be parenthesized. The previous
            # form parsed as '(a and b) or c', so any waiter failure whose
            # reason mentioned "No updates are to be performed" raised
            # ChangeEmptyError regardless of the changeset status.
            if status == "FAILED" and (
                    "The submitted information didn't contain changes."
                    in reason or
                    "No updates are to be performed" in reason):
                raise exceptions.ChangeEmptyError(stack_name=stack_name)

            raise RuntimeError("Failed to create the changeset: {0} "
                               "Status: {1}. Reason: {2}"
                               .format(ex, status, reason))

    def execute_changeset(self, changeset_id, stack_name,
                          disable_rollback=False):
        """
        Calls CloudFormation to execute changeset

        :param changeset_id: ID of the changeset
        :param stack_name: Name or ID of the stack
        :param disable_rollback: Disable rollback of all resource changes
        :return: Response from execute-change-set call
        """
        return self._client.execute_change_set(
            ChangeSetName=changeset_id,
            StackName=stack_name,
            DisableRollback=disable_rollback)

    def wait_for_execute(self, stack_name, changeset_type):
        """
        Waits until the stack create/update triggered by a changeset finishes.

        :param stack_name: Name or ID of the stack
        :param changeset_type: "CREATE" or "UPDATE" (from ChangeSetResult)
        :raises DeployFailedError: if the stack operation does not complete
        """
        sys.stdout.write("Waiting for stack create/update to complete\n")
        sys.stdout.flush()

        # Pick the right waiter
        if changeset_type == "CREATE":
            waiter = self._client.get_waiter("stack_create_complete")
        elif changeset_type == "UPDATE":
            waiter = self._client.get_waiter("stack_update_complete")
        else:
            raise RuntimeError("Invalid changeset type {0}"
                               .format(changeset_type))

        # Poll every 30 seconds. Polling too frequently risks hitting rate
        # limits on CloudFormation's DescribeStacks API
        waiter_config = {
            'Delay': 30,
            'MaxAttempts': 120,
        }

        try:
            waiter.wait(StackName=stack_name, WaiterConfig=waiter_config)
        except botocore.exceptions.WaiterError as ex:
            LOG.debug("Execute changeset waiter exception", exc_info=ex)

            raise exceptions.DeployFailedError(stack_name=stack_name)

    def create_and_wait_for_changeset(self, stack_name, cfn_template,
                                      parameter_values, capabilities, role_arn,
                                      notification_arns, s3_uploader, tags):
        """Create a changeset and block until it is ready to execute.

        :return: ChangeSetResult for the created changeset
        """
        result = self.create_changeset(
            stack_name, cfn_template, parameter_values, capabilities,
            role_arn, notification_arns, s3_uploader, tags)
        self.wait_for_changeset(result.changeset_id, stack_name)

        return result
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/exceptions.py
ADDED
|
@@ -0,0 +1,59 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
class CloudFormationCommandError(Exception):
    """Base class for all CloudFormation customization errors.

    Subclasses override ``fmt`` with a format string; keyword arguments
    passed to the constructor fill the template and are also kept on the
    instance as ``kwargs`` for programmatic access.
    """
    fmt = 'An unspecified error occurred'

    def __init__(self, **kwargs):
        # Render the message eagerly from the subclass template.
        message = self.fmt.format(**kwargs)
        super().__init__(message)
        self.kwargs = kwargs
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class InvalidTemplatePathError(CloudFormationCommandError):
    """Raised when the supplied template path is not an existing file."""
    fmt = "Invalid template path {template_path}"
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class ChangeEmptyError(CloudFormationCommandError):
    """Raised when a changeset would contain no changes to deploy."""
    fmt = "No changes to deploy. Stack {stack_name} is up to date"
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class InvalidLocalPathError(CloudFormationCommandError):
    """Raised when a resource property points at a missing local path."""
    fmt = ("Parameter {property_name} of resource {resource_id} refers "
           "to a file or folder that does not exist {local_path}")
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class InvalidTemplateUrlParameterError(CloudFormationCommandError):
    """Raised when a template-URL property is neither an S3 URL nor a path."""
    fmt = ("{property_name} parameter of {resource_id} resource is invalid. "
           "It must be a S3 URL or path to CloudFormation "
           "template file. Actual: {template_path}")
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class ExportFailedError(CloudFormationCommandError):
    """Raised when uploading a referenced artifact to S3 fails."""
    fmt = ("Unable to upload artifact {property_value} referenced "
           "by {property_name} parameter of {resource_id} resource."
           "\n"
           "{ex}")
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class InvalidKeyValuePairArgumentError(CloudFormationCommandError):
    """Raised when a list argument entry is not in ``Key=Value`` form."""
    fmt = ("{value} value passed to --{argname} must be of format "
           "Key=Value")
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class DeployFailedError(CloudFormationCommandError):
    """Raised when the stack create/update triggered by a changeset fails."""
    fmt = \
        ("Failed to create/update the stack. Run the following command"
         "\n"
         "to fetch the list of events leading up to the failure"
         "\n"
         "aws cloudformation describe-stack-events --stack-name {stack_name}")
|
| 49 |
+
|
| 50 |
+
class DeployBucketRequiredError(CloudFormationCommandError):
    """Raised when a template exceeds 51,200 bytes and no --s3-bucket is given."""
    fmt = \
        ("Templates with a size greater than 51,200 bytes must be deployed "
         "via an S3 Bucket. Please add the --s3-bucket parameter to your "
         "command. The local template will be copied to that S3 bucket and "
         "then deployed.")
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class InvalidForEachIntrinsicFunctionError(CloudFormationCommandError):
    """Raised when a ``Fn::ForEach::`` value is not a three-entry list."""
    fmt = 'The value of {resource_id} has an invalid "Fn::ForEach::" format: Must be a list of three entries'
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/package.py
ADDED
|
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import os
|
| 15 |
+
import logging
|
| 16 |
+
import sys
|
| 17 |
+
|
| 18 |
+
import json
|
| 19 |
+
|
| 20 |
+
from botocore.client import Config
|
| 21 |
+
|
| 22 |
+
from awscli.customizations.cloudformation.artifact_exporter import Template
|
| 23 |
+
from awscli.customizations.cloudformation.yamlhelper import yaml_dump
|
| 24 |
+
from awscli.customizations.cloudformation import exceptions
|
| 25 |
+
from awscli.customizations.commands import BasicCommand
|
| 26 |
+
from awscli.customizations.s3uploader import S3Uploader
|
| 27 |
+
|
| 28 |
+
LOG = logging.getLogger(__name__)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class PackageCommand(BasicCommand):
    """Implements ``aws cloudformation package``.

    Uploads local artifacts referenced by a CloudFormation template to S3
    and writes out a transformed template whose references point at the
    uploaded S3 locations.
    """

    # Message printed after a successful run with --output-template-file.
    MSG_PACKAGED_TEMPLATE_WRITTEN = (
        "Successfully packaged artifacts and wrote output template "
        "to file {output_file_name}."
        "\n"
        "Execute the following command to deploy the packaged template"
        "\n"
        "aws cloudformation deploy --template-file {output_file_path} "
        "--stack-name <YOUR STACK NAME>"
        "\n")

    NAME = "package"

    DESCRIPTION = BasicCommand.FROM_FILE("cloudformation",
                                         "_package_description.rst")

    # CLI argument definitions consumed by BasicCommand.
    ARG_TABLE = [
        {
            'name': 'template-file',
            'required': True,
            'help_text': (
                'The path where your AWS CloudFormation'
                ' template is located.'
            )
        },

        {
            'name': 's3-bucket',
            'required': True,
            'help_text': (
                'The name of the S3 bucket where this command uploads'
                ' the artifacts that are referenced in your template.'
            )
        },

        {
            'name': 's3-prefix',
            'help_text': (
                'A prefix name that the command adds to the'
                ' artifacts\' name when it uploads them to the S3 bucket.'
                ' The prefix name is a path name (folder name) for'
                ' the S3 bucket.'
            )
        },

        {
            'name': 'kms-key-id',
            'help_text': (
                'The ID of an AWS KMS key that the command uses'
                ' to encrypt artifacts that are at rest in the S3 bucket.'
            )
        },

        {
            "name": "output-template-file",
            "help_text": (
                "The path to the file where the command writes the"
                " output AWS CloudFormation template. If you don't specify"
                " a path, the command writes the template to the standard"
                " output."
            )
        },

        {
            "name": "use-json",
            "action": "store_true",
            "help_text": (
                "Indicates whether to use JSON as the format for the output AWS"
                " CloudFormation template. YAML is used by default."
            )
        },

        {
            "name": "force-upload",
            "action": "store_true",
            "help_text": (
                'Indicates whether to override existing files in the S3 bucket.'
                ' Specify this flag to upload artifacts even if they '
                ' match existing artifacts in the S3 bucket.'
            )
        },
        {
            "name": "metadata",
            "cli_type_name": "map",
            "schema": {
                "type": "map",
                "key": {"type": "string"},
                "value": {"type": "string"}
            },
            "help_text": "A map of metadata to attach to *ALL* the artifacts that"
                         " are referenced in your template."
        }
    ]

    def _run_main(self, parsed_args, parsed_globals):
        """Entry point invoked by BasicCommand; returns 0 on success."""
        # s3v4 signing is required for SSE-KMS uploads; respect the
        # CLI-wide --region and --no-verify-ssl settings.
        s3_client = self._session.create_client(
            "s3",
            config=Config(signature_version='s3v4'),
            region_name=parsed_globals.region,
            verify=parsed_globals.verify_ssl)

        template_path = parsed_args.template_file
        if not os.path.isfile(template_path):
            raise exceptions.InvalidTemplatePathError(
                template_path=template_path)

        bucket = parsed_args.s3_bucket

        self.s3_uploader = S3Uploader(s3_client,
                                      bucket,
                                      parsed_args.s3_prefix,
                                      parsed_args.kms_key_id,
                                      parsed_args.force_upload)
        # attach the given metadata to the artifacts to be uploaded
        self.s3_uploader.artifact_metadata = parsed_args.metadata

        output_file = parsed_args.output_template_file
        use_json = parsed_args.use_json
        exported_str = self._export(template_path, use_json)

        sys.stdout.write("\n")
        self.write_output(output_file, exported_str)

        if output_file:
            msg = self.MSG_PACKAGED_TEMPLATE_WRITTEN.format(
                output_file_name=output_file,
                output_file_path=os.path.abspath(output_file))
            sys.stdout.write(msg)

        sys.stdout.flush()
        return 0

    def _export(self, template_path, use_json):
        """Upload referenced artifacts and return the transformed template
        serialized as JSON or YAML text."""
        template = Template(template_path, os.getcwd(), self.s3_uploader)
        exported_template = template.export()

        if use_json:
            exported_str = json.dumps(exported_template, indent=4, ensure_ascii=False)
        else:
            exported_str = yaml_dump(exported_template)

        return exported_str

    def write_output(self, output_file_name, data):
        """Write *data* to the named file, or to stdout when the name is None."""
        if output_file_name is None:
            sys.stdout.write(data)
            return

        with open(output_file_name, "w") as fp:
            fp.write(data)
|
data/lib/python3.10/site-packages/awscli/customizations/cloudformation/yamlhelper.py
ADDED
|
@@ -0,0 +1,104 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
from botocore.compat import json
|
| 14 |
+
from botocore.compat import OrderedDict
|
| 15 |
+
|
| 16 |
+
import yaml
|
| 17 |
+
from yaml.resolver import ScalarNode, SequenceNode
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def intrinsics_multi_constructor(loader, tag_prefix, node):
    """
    YAML constructor to parse CloudFormation intrinsics.
    Returns a single-entry dict keyed by the full intrinsic name.
    """

    # Drop the leading '!' to recover the bare tag name.
    short_name = node.tag[1:]

    # "Ref" and "Condition" are used without the "Fn::" namespace;
    # every other intrinsic gets the prefix.
    if short_name in ("Ref", "Condition"):
        full_name = short_name
    else:
        full_name = "Fn::" + short_name

    if short_name == "GetAtt" and isinstance(node.value, str):
        # The !GetAtt shorthand accepts "Resource.Attribute" while the
        # standard notation is a list [Resource, Attribute]; normalize
        # the shorthand into the standard form.
        parsed_value = node.value.split(".", 1)
    elif isinstance(node, ScalarNode):
        # Scalar node (e.g. a plain string)
        parsed_value = loader.construct_scalar(node)
    elif isinstance(node, SequenceNode):
        # Sequence node (e.g. [1, 2])
        parsed_value = loader.construct_sequence(node)
    else:
        # Mapping node (e.g. {foo: bar})
        parsed_value = loader.construct_mapping(node)

    return {full_name: parsed_value}
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def _dict_representer(dumper, data):
|
| 58 |
+
return dumper.represent_dict(data.items())
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def yaml_dump(dict_to_dump):
    """
    Dumps the dictionary as a YAML document
    :param dict_to_dump: mapping to serialize
    :return: YAML document string (block style, no anchors/aliases)
    """
    # Serialize OrderedDicts as plain mappings so key order is preserved
    # without a !!python/object tag.
    FlattenAliasDumper.add_representer(OrderedDict, _dict_representer)
    return yaml.dump(
        dict_to_dump,
        default_flow_style=False,
        Dumper=FlattenAliasDumper,
    )
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def _dict_constructor(loader, node):
|
| 76 |
+
# Necessary in order to make yaml merge tags work
|
| 77 |
+
loader.flatten_mapping(node)
|
| 78 |
+
return OrderedDict(loader.construct_pairs(node))
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class SafeLoaderWrapper(yaml.SafeLoader):
    """Isolated safe loader to allow for customizations without global changes.

    Constructors registered on this subclass do not leak onto
    ``yaml.SafeLoader`` itself.
    """
    pass
| 86 |
+
|
| 87 |
+
def yaml_parse(yamlstr):
    """Parse a yaml string into ordered Python objects."""
    try:
        # PyYAML doesn't support json as well as it should, so if the input
        # is actually just json it is better to parse it with the standard
        # json parser.
        return json.loads(yamlstr, object_pairs_hook=OrderedDict)
    except ValueError:
        # Not valid JSON: fall back to YAML with CloudFormation intrinsics
        # and order-preserving mappings.
        cfn_loader = SafeLoaderWrapper
        cfn_loader.add_constructor(
            yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
            _dict_constructor)
        cfn_loader.add_multi_constructor("!", intrinsics_multi_constructor)
        return yaml.load(yamlstr, cfn_loader)
| 100 |
+
|
| 101 |
+
|
| 102 |
+
class FlattenAliasDumper(yaml.SafeDumper):
    """SafeDumper that never emits YAML anchors/aliases for shared objects."""
    def ignore_aliases(self, data):
        # Always inline repeated objects instead of emitting &anchor/*alias.
        return True
data/lib/python3.10/site-packages/awscli/customizations/dlm/__init__.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (175 Bytes). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/constants.cpython-310.pyc
ADDED
|
Binary file (957 Bytes). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/createdefaultrole.cpython-310.pyc
ADDED
|
Binary file (4.24 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/dlm.cpython-310.pyc
ADDED
|
Binary file (874 Bytes). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/__pycache__/iam.cpython-310.pyc
ADDED
|
Binary file (2.1 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/constants.py
ADDED
|
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
# Declare all the constants used by Lifecycle in this file

# Lifecycle default role names, per resource type.
LIFECYCLE_DEFAULT_ROLE_NAME = "AWSDataLifecycleManagerDefaultRole"
LIFECYCLE_DEFAULT_ROLE_NAME_AMI = (
    "AWSDataLifecycleManagerDefaultRoleForAMIManagement")

# AWS managed policy names attached to the default roles.
LIFECYCLE_DEFAULT_MANAGED_POLICY_NAME = "AWSDataLifecycleManagerServiceRole"
LIFECYCLE_DEFAULT_MANAGED_POLICY_NAME_AMI = (
    "AWSDataLifecycleManagerServiceRoleForAMIManagement")

# {0} = partition suffix (e.g. "aws"), {1} = managed policy name.
POLICY_ARN_PATTERN = "arn:{0}:iam::aws:policy/service-role/{1}"

# Trust policy that lets the DLM service assume the default role.
LIFECYCLE_DEFAULT_ROLE_ASSUME_POLICY = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Sid": "",
            "Effect": "Allow",
            "Principal": {"Service": "dlm.amazonaws.com"},
            "Action": "sts:AssumeRole",
        },
    ],
}

# Supported resource types and their default role/policy names.
RESOURCE_TYPE_SNAPSHOT = "snapshot"
RESOURCE_TYPE_IMAGE = "image"

RESOURCES = {
    RESOURCE_TYPE_SNAPSHOT: {
        'default_role_name': LIFECYCLE_DEFAULT_ROLE_NAME,
        'default_policy_name': LIFECYCLE_DEFAULT_MANAGED_POLICY_NAME,
    },
    RESOURCE_TYPE_IMAGE: {
        'default_role_name': LIFECYCLE_DEFAULT_ROLE_NAME_AMI,
        'default_policy_name': LIFECYCLE_DEFAULT_MANAGED_POLICY_NAME_AMI,
    },
}
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/createdefaultrole.py
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
# Class to create default roles for lifecycle
|
| 15 |
+
import logging
|
| 16 |
+
from awscli.clidriver import CLIOperationCaller
|
| 17 |
+
from awscli.customizations.utils import get_policy_arn_suffix
|
| 18 |
+
from awscli.customizations.commands import BasicCommand
|
| 19 |
+
from awscli.customizations.dlm.iam import IAM
|
| 20 |
+
from awscli.customizations.dlm.constants \
|
| 21 |
+
import RESOURCES, \
|
| 22 |
+
LIFECYCLE_DEFAULT_ROLE_ASSUME_POLICY, \
|
| 23 |
+
POLICY_ARN_PATTERN, \
|
| 24 |
+
RESOURCE_TYPE_SNAPSHOT, \
|
| 25 |
+
RESOURCE_TYPE_IMAGE
|
| 26 |
+
|
| 27 |
+
LOG = logging.getLogger(__name__)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _construct_result(create_role_response, get_policy_response):
|
| 31 |
+
get_policy_response.pop('ResponseMetadata', None)
|
| 32 |
+
create_role_response.pop('ResponseMetadata', None)
|
| 33 |
+
result = {'RolePolicy': get_policy_response}
|
| 34 |
+
result.update(create_role_response)
|
| 35 |
+
return result
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def display_response(session, operation_name, result, parsed_globals):
    """Render *result* for the user as formatted output (e.g. JSON).

    Does nothing when *result* is None.
    """
    if result is None:
        return
    # NOTE: _display_response is private; revisit once the display
    # functionality is exposed outside CLIOperationCaller.
    caller = CLIOperationCaller(session)
    caller._display_response(operation_name, result, parsed_globals)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
def get_policy_arn(region, policy_name):
    """Build the managed-policy ARN for *policy_name* in *region*'s partition."""
    partition = get_policy_arn_suffix(region)
    return POLICY_ARN_PATTERN.format(partition, policy_name)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def get_region(session, parsed_globals):
    """Resolve the effective region: the --region flag wins, then config."""
    if parsed_globals.region is not None:
        return parsed_globals.region
    return session.get_config_variable('region')
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class CreateDefaultRole(BasicCommand):
    """``aws dlm create-default-role``: create the default Lifecycle IAM role.

    Creates the service role for the chosen resource type (snapshot or
    image) if it does not already exist, and attaches the corresponding
    AWS managed policy. If the role already exists its policy is left
    untouched.
    """

    NAME = "create-default-role"
    DESCRIPTION = ('Creates the default IAM role '
                   ' which will be used by Lifecycle service.\n'
                   'If the role does not exist, create-default-role '
                   'will automatically create it and set its policy.'
                   ' If the role has been already '
                   'created, create-default-role'
                   ' will not update its policy.'
                   '\n')
    ARG_TABLE = [
        {'name': 'iam-endpoint',
         'no_paramfile': True,
         'help_text': '<p>The IAM endpoint to call for creating the roles.'
                      ' This is optional and should only be specified when a'
                      ' custom endpoint should be called for IAM operations'
                      '.</p>'},
        {'name': 'resource-type',
         'default': RESOURCE_TYPE_SNAPSHOT,
         'choices': [RESOURCE_TYPE_SNAPSHOT, RESOURCE_TYPE_IMAGE],
         'help_text': (
             "<p>The resource type for which the role needs to be created."
             " The available options are '%s' and '%s'."
             " This parameter defaults to '%s'.</p>"
             % (RESOURCE_TYPE_SNAPSHOT, RESOURCE_TYPE_IMAGE,
                RESOURCE_TYPE_SNAPSHOT))}

    ]

    def __init__(self, session):
        super(CreateDefaultRole, self).__init__(session)

    def _run_main(self, parsed_args, parsed_globals):
        """Call to run the commands.

        Resolves region/endpoint/resource-type from the parsed arguments,
        creates the default role if needed, and displays the result.
        Always returns 0 (the command is idempotent by design).
        """
        self._region = get_region(self._session, parsed_globals)
        self._endpoint_url = parsed_args.iam_endpoint
        self._resource_type = parsed_args.resource_type
        self._iam_client = IAM(self._session.create_client(
            'iam',
            region_name=self._region,
            endpoint_url=self._endpoint_url,
            verify=parsed_globals.verify_ssl
        ))

        result = self._create_default_role_if_not_exists(parsed_globals)

        display_response(
            self._session,
            'create_role',
            result,
            parsed_globals
        )

        return 0

    def _create_default_role_if_not_exists(self, parsed_globals):
        """Create the default lifecycle role if it doesn't exist already.

        Returns the combined create-role / get-policy result dict, or None
        when nothing was created (role already present, or the expected
        managed policy is missing in this partition).
        """
        role_name = RESOURCES[self._resource_type]['default_role_name']
        assume_role_policy = LIFECYCLE_DEFAULT_ROLE_ASSUME_POLICY

        if self._iam_client.check_if_role_exists(role_name):
            LOG.debug('Role %s exists', role_name)
            return None

        LOG.debug('Role %s does not exist. '
                  'Creating default role for Lifecycle', role_name)

        # _run_main has already resolved the region into self._region;
        # reuse it rather than re-deriving it from session and globals.
        region = self._region

        if region is None:
            raise ValueError('You must specify a region. '
                             'You can also configure your region '
                             'by running "aws configure".')

        managed_policy_arn = get_policy_arn(
            region,
            RESOURCES[self._resource_type]['default_policy_name']
        )

        # Don't proceed if managed policy does not exist
        if not self._iam_client.check_if_policy_exists(managed_policy_arn):
            LOG.debug('Managed Policy %s does not exist.', managed_policy_arn)
            return None

        LOG.debug('Managed Policy %s exists.', managed_policy_arn)
        # Create default role
        create_role_response = \
            self._iam_client.create_role_with_trust_policy(
                role_name,
                assume_role_policy
            )
        # Attach policy to role
        self._iam_client.attach_policy_to_role(
            managed_policy_arn,
            role_name
        )

        # Construct result
        get_policy_response = self._iam_client.get_policy(managed_policy_arn)
        return _construct_result(create_role_response, get_policy_response)
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/dlm.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
from awscli.customizations.dlm.createdefaultrole import CreateDefaultRole
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def dlm_initialize(cli):
    """Entry point for the Lifecycle (DLM) high level commands."""
    cli.register('building-command-table.dlm', register_commands)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def register_commands(command_table, session, **kwargs):
    """Inject DLM high level commands while the command table is built.

    The injected names must not collide with existing low-level API
    call names.
    """
    command_table['create-default-role'] = CreateDefaultRole(session)
|
data/lib/python3.10/site-packages/awscli/customizations/dlm/iam.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class IAM(object):
    """Thin wrapper over an IAM client used by the DLM customization."""

    def __init__(self, iam_client):
        self.iam_client = iam_client

    def check_if_role_exists(self, role_name):
        """Return True if the role named *role_name* exists."""
        try:
            self.iam_client.get_role(RoleName=role_name)
        except self.iam_client.exceptions.NoSuchEntityException:
            return False
        return True

    def check_if_policy_exists(self, policy_arn):
        """Return True if the policy at *policy_arn* exists."""
        try:
            self.iam_client.get_policy(PolicyArn=policy_arn)
        except self.iam_client.exceptions.NoSuchEntityException:
            return False
        return True

    def attach_policy_to_role(self, policy_arn, role_name):
        """Attach the Lifecycle policy at *policy_arn* to *role_name*."""
        return self.iam_client.attach_role_policy(
            PolicyArn=policy_arn,
            RoleName=role_name,
        )

    def create_role_with_trust_policy(self, role_name, assume_role_policy):
        """Create *role_name* with *assume_role_policy* as its trust policy."""
        return self.iam_client.create_role(
            RoleName=role_name,
            AssumeRolePolicyDocument=json.dumps(assume_role_policy))

    def get_policy(self, arn):
        """Return the default policy document for *arn*.

        Used to display the policy contents to the user.
        """
        policy_details = self.iam_client.get_policy(PolicyArn=arn)
        default_version = (
            policy_details.get("Policy", {}).get("DefaultVersionId", ""))
        version_details = self.iam_client.get_policy_version(
            PolicyArn=arn,
            VersionId=default_version,
        )
        return version_details.get("PolicyVersion", {}).get("Document", {})
|
data/lib/python3.10/site-packages/awscli/customizations/eks/__init__.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
from awscli.customizations.eks.update_kubeconfig import UpdateKubeconfigCommand
|
| 15 |
+
from awscli.customizations.eks.get_token import GetTokenCommand
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def initialize(cli):
    """Entry point for the EKS high level commands."""
    cli.register('building-command-table.eks', inject_commands)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def inject_commands(command_table, session, **kwargs):
    """Add the EKS high level commands while the command table is built."""
    command_table['update-kubeconfig'] = UpdateKubeconfigCommand(session)
    command_table['get-token'] = GetTokenCommand(session)
data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (885 Bytes). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/exceptions.cpython-310.pyc
ADDED
|
Binary file (557 Bytes). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/get_token.cpython-310.pyc
ADDED
|
Binary file (7.45 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/kubeconfig.cpython-310.pyc
ADDED
|
Binary file (9.47 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/ordered_yaml.cpython-310.pyc
ADDED
|
Binary file (1.77 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/eks/__pycache__/update_kubeconfig.cpython-310.pyc
ADDED
|
Binary file (8.53 kB). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/eks/exceptions.py
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class EKSError(Exception):
    """Base class for all errors raised by the EKS customization."""


class EKSClusterError(EKSError):
    """Raised when a cluster is not in the correct state."""
data/lib/python3.10/site-packages/awscli/customizations/eks/get_token.py
ADDED
|
@@ -0,0 +1,276 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
import base64
|
| 14 |
+
import botocore
|
| 15 |
+
import json
|
| 16 |
+
import os
|
| 17 |
+
import sys
|
| 18 |
+
|
| 19 |
+
from datetime import datetime, timedelta
|
| 20 |
+
from botocore.signers import RequestSigner
|
| 21 |
+
from botocore.model import ServiceId
|
| 22 |
+
|
| 23 |
+
from awscli.formatter import get_formatter
|
| 24 |
+
from awscli.customizations.commands import BasicCommand
|
| 25 |
+
from awscli.customizations.utils import uni_print
|
| 26 |
+
from awscli.customizations.utils import validate_mutually_exclusive
|
| 27 |
+
|
| 28 |
+
# STS operation that backs the bearer token.
AUTH_SERVICE = "sts"
AUTH_COMMAND = "GetCallerIdentity"
AUTH_API_VERSION = "2011-06-15"
AUTH_SIGNING_VERSION = "v4"

# client.authentication.k8s.io ExecCredential API versions.
ALPHA_API = "client.authentication.k8s.io/v1alpha1"
BETA_API = "client.authentication.k8s.io/v1beta1"
V1_API = "client.authentication.k8s.io/v1"

FULLY_SUPPORTED_API_VERSIONS = [V1_API, BETA_API]
DEPRECATED_API_VERSIONS = [ALPHA_API]

# Messages for malformed / unrecognized / deprecated KUBERNETES_EXEC_INFO.
ERROR_MSG_TPL = (
    "{0} KUBERNETES_EXEC_INFO, defaulting to {1}. This is likely a "
    "bug in your Kubernetes client. Please update your Kubernetes "
    "client."
)
UNRECOGNIZED_MSG_TPL = (
    "Unrecognized API version in KUBERNETES_EXEC_INFO, defaulting to "
    "{0}. This is likely due to an outdated AWS "
    "CLI. Please update your AWS CLI."
)
DEPRECATION_MSG_TPL = (
    "Kubeconfig user entry is using deprecated API version {0}. Run "
    "'aws eks update-kubeconfig' to update."
)

# Presigned url timeout in seconds
URL_TIMEOUT = 60

# Advertised token lifetime (STS signs for 15 min; clients refresh at 14).
TOKEN_EXPIRATION_MINS = 14

TOKEN_PREFIX = 'k8s-aws-v1.'

K8S_AWS_ID_HEADER = 'x-k8s-aws-id'
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class GetTokenCommand(BasicCommand):
    """``aws eks get-token``: emit a Kubernetes ExecCredential object.

    The token is a presigned STS GetCallerIdentity URL, base64url-encoded
    under the ``k8s-aws-v1.`` prefix, wrapped in the ExecCredential JSON
    shape expected by kubectl's exec credential plugin protocol.
    """

    NAME = 'get-token'

    DESCRIPTION = (
        "Get a token for authentication with an Amazon EKS cluster. "
        "This can be used as an alternative to the "
        "aws-iam-authenticator."
    )

    ARG_TABLE = [
        {
            'name': 'cluster-name',
            'help_text': (
                "Specify the name of the Amazon EKS cluster to create a token for. (Note: for local clusters on AWS Outposts, please use --cluster-id parameter)"
            ),
            'required': False,
        },
        {
            'name': 'role-arn',
            'help_text': (
                "Assume this role for credentials when signing the token. "
                "Use this optional parameter when the credentials for signing "
                "the token differ from that of the current role session. "
                "Using this parameter results in new role session credentials "
                "that are used to sign the token."
            ),
            'required': False,
        },
        {
            'name': 'cluster-id',
            # When EKS in-region cluster supports cluster-id, we will need to update this help text
            'help_text': (
                "Specify the id of the Amazon EKS cluster to create a token for. (Note: for local clusters on AWS Outposts only)"
            ),
            'required': False,
        },
    ]

    def get_expiration_time(self):
        """Return an RFC 3339 UTC timestamp TOKEN_EXPIRATION_MINS from now."""
        token_expiration = datetime.utcnow() + timedelta(
            minutes=TOKEN_EXPIRATION_MINS
        )
        return token_expiration.strftime('%Y-%m-%dT%H:%M:%SZ')

    def _run_main(self, parsed_args, parsed_globals):
        """Validate arguments, generate the token, print the ExecCredential."""
        validate_mutually_exclusive(
            parsed_args, ['cluster_name'], ['cluster_id'])

        # Resolve the cluster identifier before doing any STS work so bad
        # invocations fail fast (and never assume a role needlessly).
        if parsed_args.cluster_id:
            identifier = parsed_args.cluster_id
        elif parsed_args.cluster_name:
            identifier = parsed_args.cluster_name
        else:
            # BUG FIX: this previously *returned* the ValueError instance
            # instead of raising it, so the missing-argument case was
            # silently treated as a successful result.
            raise ValueError("Either parameter --cluster-name or --cluster-id must be specified.")

        client_factory = STSClientFactory(self._session)
        sts_client = client_factory.get_sts_client(
            region_name=parsed_globals.region, role_arn=parsed_args.role_arn
        )

        token = TokenGenerator(sts_client).get_token(identifier)

        # By default STS signs the url for 15 minutes so we are creating a
        # rfc3339 timestamp with expiration in 14 minutes as part of the token, which
        # is used by some clients (client-go) who will refresh the token after 14 mins
        token_expiration = self.get_expiration_time()

        full_object = {
            "kind": "ExecCredential",
            "apiVersion": self.discover_api_version(),
            "spec": {},
            "status": {
                "expirationTimestamp": token_expiration,
                "token": token,
            },
        }

        output = parsed_globals.output
        if output is None:
            output = self._session.get_config_variable('output')
        formatter = get_formatter(output, parsed_globals)
        formatter.query = parsed_globals.query

        formatter(self.NAME, full_object)
        uni_print('\n')
        return 0

    def discover_api_version(self):
        """
        Parses the KUBERNETES_EXEC_INFO environment variable and returns the
        API version. If the environment variable is malformed or invalid,
        return the v1beta1 response and print a message to stderr.

        If the v1alpha1 API is specified explicitly, a message is printed to
        stderr with instructions to update.

        :return: The client authentication API version
        :rtype: string
        """
        # At the time Kubernetes v1.29 is released upstream (approx Dec 2023),
        # "v1beta1" will be removed. At or around that time, EKS will likely
        # support v1.22 through v1.28, in which client API version "v1beta1"
        # will be supported by all EKS versions.
        fallback_api_version = BETA_API

        error_prefixes = {
            "error": "Error parsing",
            "empty": "Empty",
        }

        exec_info_raw = os.environ.get("KUBERNETES_EXEC_INFO", "")
        if not exec_info_raw:
            # All kube clients should be setting this, but client-go clients
            # (kubectl, kubelet, etc) < 1.20 were not setting this if the API
            # version defined in the kubeconfig was not v1alpha1.
            #
            # This was changed in kubernetes/kubernetes#95489 so that
            # KUBERNETES_EXEC_INFO is always provided
            return fallback_api_version
        try:
            exec_info = json.loads(exec_info_raw)
        except json.JSONDecodeError:
            # The environment variable was malformed
            uni_print(
                ERROR_MSG_TPL.format(
                    error_prefixes["error"],
                    fallback_api_version,
                ),
                sys.stderr,
            )
            uni_print("\n", sys.stderr)
            return fallback_api_version

        api_version_raw = exec_info.get("apiVersion")
        if api_version_raw in FULLY_SUPPORTED_API_VERSIONS:
            return api_version_raw
        elif api_version_raw in DEPRECATED_API_VERSIONS:
            uni_print(DEPRECATION_MSG_TPL.format(api_version_raw), sys.stderr)
            uni_print("\n", sys.stderr)
            return api_version_raw
        else:
            uni_print(
                UNRECOGNIZED_MSG_TPL.format(fallback_api_version),
                sys.stderr,
            )
            uni_print("\n", sys.stderr)
            return fallback_api_version
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
class TokenGenerator(object):
    """Produces EKS bearer tokens from presigned STS GetCallerIdentity URLs."""

    def __init__(self, sts_client):
        self._sts_client = sts_client

    def get_token(self, k8s_aws_id):
        """Generate a presigned url token to pass to kubectl."""
        presigned_url = self._get_presigned_url(k8s_aws_id)
        encoded = base64.urlsafe_b64encode(presigned_url.encode('utf-8'))
        # Tokens are emitted without base64 padding ('=').
        return TOKEN_PREFIX + encoded.decode('utf-8').rstrip('=')

    def _get_presigned_url(self, k8s_aws_id):
        # The cluster identifier travels as the x-k8s-aws-id header; the
        # event handlers registered on the client move it from Params into
        # the signed request headers.
        return self._sts_client.generate_presigned_url(
            'get_caller_identity',
            Params={K8S_AWS_ID_HEADER: k8s_aws_id},
            ExpiresIn=URL_TIMEOUT,
            HttpMethod='GET',
        )
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class STSClientFactory(object):
    """Builds STS clients wired up for EKS token generation.

    The returned clients carry event handlers that smuggle the cluster
    identifier into the signed GetCallerIdentity request as the
    ``x-k8s-aws-id`` header.
    """

    def __init__(self, session):
        self._session = session

    def get_sts_client(self, region_name=None, role_arn=None):
        """Create an STS client, optionally under assumed-role credentials."""
        kwargs = {'region_name': region_name}
        if role_arn is not None:
            creds = self._get_role_credentials(region_name, role_arn)
            kwargs.update(
                aws_access_key_id=creds['AccessKeyId'],
                aws_secret_access_key=creds['SecretAccessKey'],
                aws_session_token=creds['SessionToken'],
            )
        client = self._session.create_client('sts', **kwargs)
        self._register_k8s_aws_id_handlers(client)
        return client

    def _get_role_credentials(self, region_name, role_arn):
        # Temporary client (no handlers) just for the AssumeRole call.
        sts = self._session.create_client('sts', region_name)
        response = sts.assume_role(
            RoleArn=role_arn, RoleSessionName='EKSGetTokenAuth'
        )
        return response['Credentials']

    def _register_k8s_aws_id_handlers(self, sts_client):
        events = sts_client.meta.events
        events.register(
            'provide-client-params.sts.GetCallerIdentity',
            self._retrieve_k8s_aws_id,
        )
        events.register(
            'before-sign.sts.GetCallerIdentity',
            self._inject_k8s_aws_id_header,
        )

    def _retrieve_k8s_aws_id(self, params, context, **kwargs):
        # Pull the pseudo-parameter out of the API params and stash it on
        # the request context so it survives until signing time.
        if K8S_AWS_ID_HEADER in params:
            context[K8S_AWS_ID_HEADER] = params.pop(K8S_AWS_ID_HEADER)

    def _inject_k8s_aws_id_header(self, request, **kwargs):
        # Re-attach the stashed cluster id as a header before signing.
        if K8S_AWS_ID_HEADER in request.context:
            request.headers[K8S_AWS_ID_HEADER] = request.context[K8S_AWS_ID_HEADER]
|
data/lib/python3.10/site-packages/awscli/customizations/eks/kubeconfig.py
ADDED
|
@@ -0,0 +1,281 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import os
|
| 15 |
+
import yaml
|
| 16 |
+
import logging
|
| 17 |
+
import errno
|
| 18 |
+
from botocore.compat import OrderedDict
|
| 19 |
+
|
| 20 |
+
from awscli.customizations.eks.exceptions import EKSError
|
| 21 |
+
from awscli.customizations.eks.ordered_yaml import (ordered_yaml_load,
|
| 22 |
+
ordered_yaml_dump)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class KubeconfigError(EKSError):
    """ Base class for all kubeconfig errors."""
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class KubeconfigCorruptedError(KubeconfigError):
    """ Raised when a kubeconfig cannot be parsed."""
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# NOTE(review): "Inaccessable" is a misspelling of "Inaccessible", but the
# name is part of the public API and is kept for backward compatibility.
class KubeconfigInaccessableError(KubeconfigError):
    """ Raised when a kubeconfig cannot be opened for read/writing."""
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _get_new_kubeconfig_content():
|
| 38 |
+
return OrderedDict([
|
| 39 |
+
("apiVersion", "v1"),
|
| 40 |
+
("clusters", []),
|
| 41 |
+
("contexts", []),
|
| 42 |
+
("current-context", ""),
|
| 43 |
+
("kind", "Config"),
|
| 44 |
+
("preferences", OrderedDict()),
|
| 45 |
+
("users", [])
|
| 46 |
+
])
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class Kubeconfig(object):
    """ In-memory representation of a kubeconfig file and its location."""

    def __init__(self, path, content=None):
        self.path = path
        # A missing content means "start from a fresh, empty kubeconfig".
        self.content = (_get_new_kubeconfig_content()
                        if content is None else content)

    def dump_content(self):
        """ Return the stored content in yaml format. """
        return ordered_yaml_dump(self.content)

    def has_cluster(self, name):
        """
        Return true if this kubeconfig contains an entry
        For the passed cluster name.
        """
        clusters = self.content.get('clusters')
        if clusters is None:
            return False
        return any('name' in entry and entry['name'] == name
                   for entry in clusters)

    def __eq__(self, other):
        if not isinstance(other, Kubeconfig):
            return False
        return self.path == other.path and self.content == other.content
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class KubeconfigValidator(object):
    """Validates that parsed kubeconfig content has the expected shape."""

    def __init__(self):
        # Validation_content is an empty Kubeconfig
        # It is used as a way to know what types different entries should be
        self._validation_content = Kubeconfig(None, None).content

    def validate_config(self, config):
        """
        Raises KubeconfigCorruptedError if the passed content is invalid

        :param config: The config to validate
        :type config: Kubeconfig
        """
        if not isinstance(config, Kubeconfig):
            raise KubeconfigCorruptedError("Internal error: "
                                           f"Not a {Kubeconfig}.")
        self._validate_config_types(config)
        self._validate_list_entry_types(config)

    def _validate_config_types(self, config):
        """
        Raises KubeconfigCorruptedError if any of the entries in config
        are the wrong type

        :param config: The config to validate
        :type config: Kubeconfig
        """
        if not isinstance(config.content, dict):
            raise KubeconfigCorruptedError(f"Content not a {dict}.")
        for key, value in self._validation_content.items():
            # None is tolerated: an empty YAML section parses as None.
            if (key in config.content and
                    config.content[key] is not None and
                    not isinstance(config.content[key], type(value))):
                raise KubeconfigCorruptedError(
                    f"{key} is wrong type: {type(config.content[key])} "
                    f"(Should be {type(value)})"
                )

    def _validate_list_entry_types(self, config):
        """
        Raises KubeconfigCorruptedError if any lists in config contain objects
        which are not dictionaries

        :param config: The config to validate
        :type config: Kubeconfig
        """
        for key in self._validation_content:
            # isinstance instead of a raw type() comparison so list
            # subclasses are accepted as well.
            if (key in config.content and
                    isinstance(config.content[key], list)):
                for element in config.content[key]:
                    if not isinstance(element, OrderedDict):
                        raise KubeconfigCorruptedError(
                            f"Entry in {key} not a {dict}. ")
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class KubeconfigLoader(object):
    """Reads kubeconfig files from disk and validates them."""

    def __init__(self, validator=None):
        self._validator = (KubeconfigValidator()
                           if validator is None else validator)

    def load_kubeconfig(self, path):
        """
        Loads the kubeconfig found at the given path.
        If no file is found at the given path,
        Generate a new kubeconfig to write back.
        If the kubeconfig is valid, loads the content from it.
        If the kubeconfig is invalid, throw the relevant exception.

        :param path: The path to load a kubeconfig from
        :type path: string

        :raises KubeconfigInaccessableError: if the kubeconfig can't be opened
        :raises KubeconfigCorruptedError: if the kubeconfig is invalid

        :return: The loaded kubeconfig
        :rtype: Kubeconfig
        """
        # A missing file is fine (content stays None and a fresh
        # kubeconfig is generated); any other I/O failure is fatal.
        loaded_content = None
        try:
            with open(path, "r") as stream:
                loaded_content = ordered_yaml_load(stream)
        except IOError as e:
            if e.errno != errno.ENOENT:
                raise KubeconfigInaccessableError(
                    f"Can't open kubeconfig for reading: {e}")
        except yaml.YAMLError as e:
            raise KubeconfigCorruptedError(
                f"YamlError while loading kubeconfig: {e}")

        loaded_config = Kubeconfig(path, loaded_content)
        self._validator.validate_config(loaded_config)
        return loaded_config
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
class KubeconfigWriter(object):
    def write_kubeconfig(self, config):
        """
        Write config to disk.
        OK if the file doesn't exist.

        :param config: The kubeconfig to write
        :type config: Kubeconfig

        :raises KubeconfigInaccessableError: if the kubeconfig
        can't be opened for writing
        """
        directory = os.path.dirname(config.path)

        # A bare filename has an empty dirname; os.makedirs("") raises
        # FileNotFoundError, which the old EEXIST check misreported.
        # Only create the directory when there is one to create.
        if directory:
            try:
                os.makedirs(directory, exist_ok=True)
            except OSError as e:
                raise KubeconfigInaccessableError(
                    f"Can't create directory for writing: {e}")
        try:
            # 0o600: a kubeconfig can embed credentials, so keep it
            # readable/writable by the owner only.
            with os.fdopen(
                    os.open(
                        config.path,
                        os.O_CREAT | os.O_RDWR | os.O_TRUNC,
                        0o600),
                    "w+") as stream:
                ordered_yaml_dump(config.content, stream)
        except (IOError, OSError) as e:
            raise KubeconfigInaccessableError(
                f"Can't open kubeconfig for writing: {e}")
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class KubeconfigAppender(object):
    """Inserts cluster/user/context entries into a Kubeconfig."""

    def insert_entry(self, config, key, new_entry):
        """
        Insert entry into the entries list at content[key]
        Overwrite an existing entry if they share the same name

        :param config: The kubeconfig to insert an entry into
        :type config: Kubeconfig
        """
        entries = self._setdefault_existing_entries(config, key)
        existing_index = self._index_same_name(entries, new_entry)
        if existing_index is not None:
            entries[existing_index] = new_entry
        else:
            entries.append(new_entry)
        return config

    def _setdefault_existing_entries(self, config, key):
        # Normalize a missing (or None) entry list to an empty list.
        config.content[key] = config.content.get(key) or []
        entries = config.content[key]
        if not isinstance(entries, list):
            raise KubeconfigError(f"Tried to insert into {key}, "
                                  f"which is a {type(entries)} "
                                  f"not a {list}")
        return entries

    def _index_same_name(self, entries, new_entry):
        # Return the index of the first entry sharing new_entry's
        # name, or None if there is no name collision.
        if "name" not in new_entry:
            return None
        target = new_entry["name"]
        for index, entry in enumerate(entries):
            if "name" in entry and entry["name"] == target:
                return index
        return None

    def _make_context(self, cluster, user, alias=None):
        """ Generate a context to associate cluster and user with a given alias."""
        body = OrderedDict([
            ("cluster", cluster["name"]),
            ("user", user["name"])
        ])
        return OrderedDict([
            ("context", body),
            ("name", alias or user["name"])
        ])

    def insert_cluster_user_pair(self, config, cluster, user, alias=None):
        """
        Insert the passed cluster entry and user entry,
        then make a context to associate them
        and set current-context to be the new context.
        Returns the new context

        :param config: the Kubeconfig to insert the pair into
        :type config: Kubeconfig

        :param cluster: the cluster entry
        :type cluster: OrderedDict

        :param user: the user entry
        :type user: OrderedDict

        :param alias: the alias for the context; defaults to the user entry name
        :type alias: str

        :return: The generated context
        :rtype: OrderedDict
        """
        context = self._make_context(cluster, user, alias=alias)
        for section, entry in (("clusters", cluster),
                               ("users", user),
                               ("contexts", context)):
            self.insert_entry(config, section, entry)

        config.content["current-context"] = context["name"]
        return context
|
data/lib/python3.10/site-packages/awscli/customizations/eks/ordered_yaml.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import yaml
|
| 15 |
+
from botocore.compat import OrderedDict
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class SafeOrderedLoader(yaml.SafeLoader):
|
| 19 |
+
""" Safely load a yaml file into an OrderedDict."""
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class SafeOrderedDumper(yaml.SafeDumper):
|
| 23 |
+
""" Safely dump an OrderedDict as yaml."""
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _ordered_constructor(loader, node):
    # Resolve any '<<' merge keys into the node first, then build an
    # OrderedDict so the mapping's original key order is preserved.
    loader.flatten_mapping(node)
    return OrderedDict(loader.construct_pairs(node))
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
SafeOrderedLoader.add_constructor(
|
| 32 |
+
yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
|
| 33 |
+
_ordered_constructor)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def _ordered_representer(dumper, data):
    # Emit the OrderedDict as a plain YAML mapping (default tag) so the
    # output carries no python-specific type information.
    return dumper.represent_mapping(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
        data.items())
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
SafeOrderedDumper.add_representer(OrderedDict, _ordered_representer)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def ordered_yaml_load(stream):
    """ Load an OrderedDict object from a yaml stream."""
    # SafeOrderedLoader has safe_load semantics (no arbitrary python
    # object construction) while keeping mapping key order.
    return yaml.load(stream, SafeOrderedLoader)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def ordered_yaml_dump(to_dump, stream=None):
    """
    Dump an OrderedDict object to yaml.

    :param to_dump: The OrderedDict to dump
    :type to_dump: OrderedDict

    :param stream: The file to dump to
    If not given or if None, only return the value
    :type stream: file
    """
    # default_flow_style=False forces block style, the conventional
    # layout for kubeconfig files.
    return yaml.dump(to_dump, stream,
                     SafeOrderedDumper, default_flow_style=False)
|
data/lib/python3.10/site-packages/awscli/customizations/eks/update_kubeconfig.py
ADDED
|
@@ -0,0 +1,341 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
| 2 |
+
#
|
| 3 |
+
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
| 4 |
+
# may not use this file except in compliance with the License. A copy of
|
| 5 |
+
# the License is located at
|
| 6 |
+
#
|
| 7 |
+
# http://aws.amazon.com/apache2.0/
|
| 8 |
+
#
|
| 9 |
+
# or in the "license" file accompanying this file. This file is
|
| 10 |
+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
| 11 |
+
# ANY KIND, either express or implied. See the License for the specific
|
| 12 |
+
# language governing permissions and limitations under the License.
|
| 13 |
+
|
| 14 |
+
import os
|
| 15 |
+
import logging
|
| 16 |
+
|
| 17 |
+
from botocore.compat import OrderedDict
|
| 18 |
+
|
| 19 |
+
from awscli.customizations.commands import BasicCommand
|
| 20 |
+
from awscli.customizations.utils import uni_print
|
| 21 |
+
from awscli.customizations.eks.exceptions import EKSClusterError
|
| 22 |
+
from awscli.customizations.eks.kubeconfig import (Kubeconfig,
|
| 23 |
+
KubeconfigError,
|
| 24 |
+
KubeconfigLoader,
|
| 25 |
+
KubeconfigWriter,
|
| 26 |
+
KubeconfigValidator,
|
| 27 |
+
KubeconfigAppender)
|
| 28 |
+
from awscli.customizations.eks.ordered_yaml import ordered_yaml_dump
|
| 29 |
+
|
| 30 |
+
LOG = logging.getLogger(__name__)
|
| 31 |
+
|
| 32 |
+
DEFAULT_PATH = os.path.expanduser("~/.kube/config")
|
| 33 |
+
|
| 34 |
+
# At the time EKS no longer supports Kubernetes v1.21 (probably ~Dec 2023),
|
| 35 |
+
# this can be safely changed to default to writing "v1"
|
| 36 |
+
API_VERSION = "client.authentication.k8s.io/v1beta1"
|
| 37 |
+
|
| 38 |
+
class UpdateKubeconfigCommand(BasicCommand):
    # Implements `aws eks update-kubeconfig`: describes the cluster,
    # builds cluster/user/context entries, and merges them into a
    # kubeconfig file (or prints the merged result with --dry-run).
    NAME = 'update-kubeconfig'

    DESCRIPTION = BasicCommand.FROM_FILE(
        'eks',
        'update-kubeconfig',
        '_description.rst'
    )

    ARG_TABLE = [
        {
            'name': 'name',
            'dest': 'cluster_name',
            'help_text': ("The name of the cluster for which "
                          "to create a kubeconfig entry. "
                          "This cluster must exist in your account and in the "
                          "specified or configured default Region "
                          "for your AWS CLI installation."),
            'required': True
        },
        {
            'name': 'kubeconfig',
            'help_text': ("Optionally specify a kubeconfig file to append "
                          "with your configuration. "
                          "By default, the configuration is written to the "
                          "first file path in the KUBECONFIG "
                          "environment variable (if it is set) "
                          "or the default kubeconfig path (.kube/config) "
                          "in your home directory."),
            'required': False
        },
        {
            'name': 'role-arn',
            'help_text': ("To assume a role for cluster authentication, "
                          "specify an IAM role ARN with this option. "
                          "For example, if you created a cluster "
                          "while assuming an IAM role, "
                          "then you must also assume that role to "
                          "connect to the cluster the first time."),
            'required': False
        },
        {
            'name': 'dry-run',
            'action': 'store_true',
            'default': False,
            'help_text': ("Print the merged kubeconfig to stdout instead of "
                          "writing it to the specified file."),
            'required': False
        },
        {
            'name': 'verbose',
            'action': 'store_true',
            'default': False,
            'help_text': ("Print more detailed output "
                          "when writing to the kubeconfig file, "
                          "including the appended entries.")
        },
        {
            'name': 'alias',
            'help_text': ("Alias for the cluster context name. "
                          "Defaults to match cluster ARN."),
            'required': False
        },
        {
            'name': 'user-alias',
            'help_text': ("Alias for the generated user name. "
                          "Defaults to match cluster ARN."),
            'required': False
        }
    ]

    def _display_entries(self, entries):
        """
        Display entries in yaml format

        :param entries: a list of OrderedDicts to be printed
        :type entries: list
        """
        uni_print("Entries:\n\n")
        for entry in entries:
            uni_print(ordered_yaml_dump(entry))
            uni_print("\n")

    def _run_main(self, parsed_args, parsed_globals):
        # Build the new entries from the live cluster description.
        client = EKSClient(self._session,
                           parsed_args=parsed_args,
                           parsed_globals=parsed_globals)
        new_cluster_dict = client.get_cluster_entry()
        new_user_dict = client.get_user_entry(user_alias=parsed_args.user_alias)

        # Pick which kubeconfig file to update (an explicit --kubeconfig
        # wins over the KUBECONFIG environment variable).
        config_selector = KubeconfigSelector(
            os.environ.get("KUBECONFIG", ""),
            parsed_args.kubeconfig
        )
        config = config_selector.choose_kubeconfig(
            new_cluster_dict["name"]
        )
        # Remember whether this is an update or a fresh insert so the
        # right message can be printed after writing.
        updating_existing = config.has_cluster(new_cluster_dict["name"])
        appender = KubeconfigAppender()
        new_context_dict = appender.insert_cluster_user_pair(config,
                                                             new_cluster_dict,
                                                             new_user_dict,
                                                             parsed_args.alias)

        if parsed_args.dry_run:
            uni_print(config.dump_content())
        else:
            writer = KubeconfigWriter()
            writer.write_kubeconfig(config)

            if updating_existing:
                uni_print("Updated context {0} in {1}\n".format(
                    new_context_dict["name"], config.path
                ))
            else:
                uni_print("Added new context {0} to {1}\n".format(
                    new_context_dict["name"], config.path
                ))

            if parsed_args.verbose:
                self._display_entries([
                    new_context_dict,
                    new_user_dict,
                    new_cluster_dict
                ])
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
|
| 166 |
+
class KubeconfigSelector(object):
    """Picks which kubeconfig file on disk should receive an update."""

    def __init__(self, env_variable, path_in, validator=None,
                 loader=None):
        """
        Parse KUBECONFIG into a list of absolute paths.
        Also replace the empty list with DEFAULT_PATH

        :param env_variable: KUBECONFIG as a long string
        :type env_variable: string

        :param path_in: The path passed in through the CLI
        :type path_in: string or None
        """
        self._validator = (KubeconfigValidator()
                           if validator is None else validator)
        self._loader = (KubeconfigLoader(self._validator)
                        if loader is None else loader)

        if path_in is not None:
            # An explicit path overrides the environment variable.
            self._paths = [self._expand_path(path_in)]
            return

        # Split KUBECONFIG into individual, non-blank paths.
        source = env_variable if env_variable != "" else DEFAULT_PATH
        pieces = [piece for piece in source.split(os.pathsep)
                  if len(piece.strip()) > 0]
        self._paths = [self._expand_path(piece) for piece in pieces]
        if not self._paths:
            self._paths = [DEFAULT_PATH]

    def choose_kubeconfig(self, cluster_name):
        """
        Choose which kubeconfig file to read from.
        If name is already an entry in one of the $KUBECONFIG files,
        choose that one.
        Otherwise choose the first file.

        :param cluster_name: The name of the cluster which is going to be added
        :type cluster_name: String

        :return: a chosen Kubeconfig based on above rules
        :rtype: Kubeconfig
        """
        # Prefer a file that already has an entry for this cluster.
        for candidate_path in self._paths:
            try:
                candidate = self._loader.load_kubeconfig(candidate_path)
            except KubeconfigError as e:
                LOG.warning("Passing {0}:{1}".format(candidate_path, e))
                continue
            if candidate.has_cluster(cluster_name):
                LOG.debug("Found entry to update at {0}".format(
                    candidate_path
                ))
                return candidate

        # No entry was found, use the first file in KUBECONFIG
        #
        # Note: This could raise KubeconfigErrors if paths[0] is corrupted
        return self._loader.load_kubeconfig(self._paths[0])

    def _expand_path(self, path):
        """ A helper to expand a path to a full absolute path. """
        return os.path.abspath(os.path.expanduser(path))
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
class EKSClient(object):
    # Wraps an `eks describe-cluster` call and converts the response
    # into kubeconfig cluster/user entries.

    def __init__(self, session, parsed_args, parsed_globals=None):
        self._session = session
        self._cluster_name = parsed_args.cluster_name
        # Lazily populated cache for the describe-cluster response.
        self._cluster_description = None
        self._parsed_globals = parsed_globals
        self._parsed_args = parsed_args

    @property
    def cluster_description(self):
        """
        Use an eks describe-cluster call to get the cluster description
        Cache the response in self._cluster_description.
        describe-cluster will only be called once.
        """
        if self._cluster_description is None:
            if self._parsed_globals is None:
                client = self._session.create_client("eks")
            else:
                # Honor global CLI overrides (--region, --endpoint-url,
                # --no-verify-ssl) when they were parsed.
                client = self._session.create_client(
                    "eks",
                    region_name=self._parsed_globals.region,
                    endpoint_url=self._parsed_globals.endpoint_url,
                    verify=self._parsed_globals.verify_ssl
                )
            full_description = client.describe_cluster(name=self._cluster_name)
            self._cluster_description = full_description["cluster"]

            # Only clusters that are usable (or becoming usable) get a
            # kubeconfig entry; anything else is an error.
            if "status" not in self._cluster_description:
                raise EKSClusterError("Cluster not found")
            if self._cluster_description["status"] not in ["ACTIVE", "UPDATING"]:
                raise EKSClusterError("Cluster status is {0}".format(
                    self._cluster_description["status"]
                ))

        return self._cluster_description

    def get_cluster_entry(self):
        """
        Return a cluster entry generated using
        the previously obtained description.
        """

        cert_data = self.cluster_description.get("certificateAuthority", {}).get("data", "")
        endpoint = self.cluster_description.get("endpoint")
        arn = self.cluster_description.get("arn")

        return OrderedDict([
            ("cluster", OrderedDict([
                ("certificate-authority-data", cert_data),
                ("server", endpoint)
            ])),
            ("name", arn)
        ])

    def get_user_entry(self, user_alias=None):
        """
        Return a user entry generated using
        the previously obtained description.
        """
        # The region is the fourth ':'-separated field of the ARN
        # (arn:partition:service:region:...).
        region = self.cluster_description.get("arn").split(":")[3]
        outpost_config = self.cluster_description.get("outpostConfig")

        if outpost_config is None:
            cluster_identification_parameter = "--cluster-name"
            cluster_identification_value = self._cluster_name
        else:
            # If cluster contains outpostConfig, use id for identification
            cluster_identification_parameter = "--cluster-id"
            cluster_identification_value = self.cluster_description.get("id")

        # kubectl exec-auth entry: runs `aws eks get-token` to fetch a
        # short-lived authentication token on demand.
        generated_user = OrderedDict([
            ("name", user_alias or self.cluster_description.get("arn", "")),
            ("user", OrderedDict([
                ("exec", OrderedDict([
                    ("apiVersion", API_VERSION),
                    ("args",
                     [
                         "--region",
                         region,
                         "eks",
                         "get-token",
                         cluster_identification_parameter,
                         cluster_identification_value,
                         "--output",
                         "json",
                     ]),
                    ("command", "aws"),
                ]))
            ]))
        ])

        if self._parsed_args.role_arn is not None:
            generated_user["user"]["exec"]["args"].extend([
                "--role",
                self._parsed_args.role_arn
            ])

        if self._session.profile:
            # Pin the profile so kubectl's exec auth uses the same
            # credentials as this CLI invocation.
            generated_user["user"]["exec"]["env"] = [OrderedDict([
                ("name", "AWS_PROFILE"),
                ("value", self._session.profile)
            ])]

        return generated_user
|
data/lib/python3.10/site-packages/awscli/customizations/emr/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (175 Bytes). View file
|
|
|
data/lib/python3.10/site-packages/awscli/customizations/emr/__pycache__/addinstancegroups.cpython-310.pyc
ADDED
|
Binary file (1.81 kB). View file
|
|
|