| Field | Type | Value statistics |
|---|---|---|
| repo | string | lengths 7–55 |
| path | string | lengths 4–223 |
| func_name | string | lengths 1–134 |
| original_string | string | lengths 75–104k |
| language | string | 1 class |
| code | string | lengths 75–104k |
| code_tokens | list | lengths 19–28.4k |
| docstring | string | lengths 1–46.9k |
| docstring_tokens | list | lengths 1–1.97k |
| sha | string | lengths 40–40 |
| url | string | lengths 87–315 |
| partition | string | 1 class |
keras-rl/keras-rl
rl/memory.py
Memory.get_recent_state
```python
def get_recent_state(self, current_observation):
    """Return list of last observations

    # Argument
        current_observation (object): Last observation

    # Returns
        A list of the last observations
    """
    # This code is slightly complicated by the fact that subsequent observations might be
    # from different episodes. We ensure that an experience never spans multiple episodes.
    # This is probably not that important in practice but it seems cleaner.
    state = [current_observation]
    idx = len(self.recent_observations) - 1
    for offset in range(0, self.window_length - 1):
        current_idx = idx - offset
        current_terminal = self.recent_terminals[current_idx - 1] if current_idx - 1 >= 0 else False
        if current_idx < 0 or (not self.ignore_episode_boundaries and current_terminal):
            # The previously handled observation was terminal, don't add the current one.
            # Otherwise we would leak into a different episode.
            break
        state.insert(0, self.recent_observations[current_idx])
    while len(state) < self.window_length:
        state.insert(0, zeroed_observation(state[0]))
    return state
```
python
[ "def", "get_recent_state", "(", "self", ",", "current_observation", ")", ":", "# This code is slightly complicated by the fact that subsequent observations might be", "# from different episodes. We ensure that an experience never spans multiple episodes.", "# This is probably not that important in practice but it seems cleaner.", "state", "=", "[", "current_observation", "]", "idx", "=", "len", "(", "self", ".", "recent_observations", ")", "-", "1", "for", "offset", "in", "range", "(", "0", ",", "self", ".", "window_length", "-", "1", ")", ":", "current_idx", "=", "idx", "-", "offset", "current_terminal", "=", "self", ".", "recent_terminals", "[", "current_idx", "-", "1", "]", "if", "current_idx", "-", "1", ">=", "0", "else", "False", "if", "current_idx", "<", "0", "or", "(", "not", "self", ".", "ignore_episode_boundaries", "and", "current_terminal", ")", ":", "# The previously handled observation was terminal, don't add the current one.", "# Otherwise we would leak into a different episode.", "break", "state", ".", "insert", "(", "0", ",", "self", ".", "recent_observations", "[", "current_idx", "]", ")", "while", "len", "(", "state", ")", "<", "self", ".", "window_length", ":", "state", ".", "insert", "(", "0", ",", "zeroed_observation", "(", "state", "[", "0", "]", ")", ")", "return", "state" ]
Return list of last observations # Argument current_observation (object): Last observation # Returns A list of the last observations
[ "Return", "list", "of", "last", "observations" ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/memory.py#L120-L144
train
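The zero-padding behaviour at an episode boundary can be shown in isolation. A minimal sketch, assuming NumPy array observations and using `np.zeros_like` as a stand-in for keras-rl's `zeroed_observation`:

```python
import numpy as np

# With window_length=4 but only two observations since the episode boundary,
# the window is left-padded with zeroed copies of the earliest observation.
window_length = 4
recent = [np.array([1.0, 2.0]), np.array([3.0, 4.0])]

state = list(recent)
while len(state) < window_length:
    state.insert(0, np.zeros_like(state[0]))  # stand-in for zeroed_observation

print([s.tolist() for s in state])
# [[0.0, 0.0], [0.0, 0.0], [1.0, 2.0], [3.0, 4.0]]
```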
keras-rl/keras-rl
rl/memory.py
SequentialMemory.sample
```python
def sample(self, batch_size, batch_idxs=None):
    """Return a randomized batch of experiences

    # Argument
        batch_size (int): Size of the batch
        batch_idxs (list of int): Indexes to extract

    # Returns
        A list of experiences randomly selected
    """
    # It is not possible to tell whether the first state in the memory is terminal, because it
    # would require access to the "terminal" flag associated to the previous state. As a result
    # we will never return this first state (only using `self.terminals[0]` to know whether the
    # second state is terminal).
    # In addition we need enough entries to fill the desired window length.
    assert self.nb_entries >= self.window_length + 2, 'not enough entries in the memory'

    if batch_idxs is None:
        # Draw random indexes such that we have enough entries before each index to fill the
        # desired window length.
        batch_idxs = sample_batch_indexes(
            self.window_length, self.nb_entries - 1, size=batch_size)
    batch_idxs = np.array(batch_idxs) + 1
    assert np.min(batch_idxs) >= self.window_length + 1
    assert np.max(batch_idxs) < self.nb_entries
    assert len(batch_idxs) == batch_size

    # Create experiences
    experiences = []
    for idx in batch_idxs:
        terminal0 = self.terminals[idx - 2]
        while terminal0:
            # Skip this transition because the environment was reset here. Select a new, random
            # transition and use this instead. This may cause the batch to contain the same
            # transition twice.
            idx = sample_batch_indexes(self.window_length + 1, self.nb_entries, size=1)[0]
            terminal0 = self.terminals[idx - 2]
        assert self.window_length + 1 <= idx < self.nb_entries

        # This code is slightly complicated by the fact that subsequent observations might be
        # from different episodes. We ensure that an experience never spans multiple episodes.
        # This is probably not that important in practice but it seems cleaner.
        state0 = [self.observations[idx - 1]]
        for offset in range(0, self.window_length - 1):
            current_idx = idx - 2 - offset
            assert current_idx >= 1
            current_terminal = self.terminals[current_idx - 1]
            if current_terminal and not self.ignore_episode_boundaries:
                # The previously handled observation was terminal, don't add the current one.
                # Otherwise we would leak into a different episode.
                break
            state0.insert(0, self.observations[current_idx])
        while len(state0) < self.window_length:
            state0.insert(0, zeroed_observation(state0[0]))
        action = self.actions[idx - 1]
        reward = self.rewards[idx - 1]
        terminal1 = self.terminals[idx - 1]

        # Okay, now we need to create the follow-up state. This is state0 shifted one timestep
        # to the right. Again, we need to be careful to not include an observation from the next
        # episode if the last state is terminal.
        state1 = [np.copy(x) for x in state0[1:]]
        state1.append(self.observations[idx])

        assert len(state0) == self.window_length
        assert len(state1) == len(state0)
        experiences.append(Experience(state0=state0, action=action, reward=reward,
                                      state1=state1, terminal1=terminal1))
    assert len(experiences) == batch_size
    return experiences
```
python
[ "def", "sample", "(", "self", ",", "batch_size", ",", "batch_idxs", "=", "None", ")", ":", "# It is not possible to tell whether the first state in the memory is terminal, because it", "# would require access to the \"terminal\" flag associated to the previous state. As a result", "# we will never return this first state (only using `self.terminals[0]` to know whether the", "# second state is terminal).", "# In addition we need enough entries to fill the desired window length.", "assert", "self", ".", "nb_entries", ">=", "self", ".", "window_length", "+", "2", ",", "'not enough entries in the memory'", "if", "batch_idxs", "is", "None", ":", "# Draw random indexes such that we have enough entries before each index to fill the", "# desired window length.", "batch_idxs", "=", "sample_batch_indexes", "(", "self", ".", "window_length", ",", "self", ".", "nb_entries", "-", "1", ",", "size", "=", "batch_size", ")", "batch_idxs", "=", "np", ".", "array", "(", "batch_idxs", ")", "+", "1", "assert", "np", ".", "min", "(", "batch_idxs", ")", ">=", "self", ".", "window_length", "+", "1", "assert", "np", ".", "max", "(", "batch_idxs", ")", "<", "self", ".", "nb_entries", "assert", "len", "(", "batch_idxs", ")", "==", "batch_size", "# Create experiences", "experiences", "=", "[", "]", "for", "idx", "in", "batch_idxs", ":", "terminal0", "=", "self", ".", "terminals", "[", "idx", "-", "2", "]", "while", "terminal0", ":", "# Skip this transition because the environment was reset here. Select a new, random", "# transition and use this instead. This may cause the batch to contain the same", "# transition twice.", "idx", "=", "sample_batch_indexes", "(", "self", ".", "window_length", "+", "1", ",", "self", ".", "nb_entries", ",", "size", "=", "1", ")", "[", "0", "]", "terminal0", "=", "self", ".", "terminals", "[", "idx", "-", "2", "]", "assert", "self", ".", "window_length", "+", "1", "<=", "idx", "<", "self", ".", "nb_entries", "# This code is slightly complicated by the fact that subsequent observations might be", "# from different episodes. We ensure that an experience never spans multiple episodes.", "# This is probably not that important in practice but it seems cleaner.", "state0", "=", "[", "self", ".", "observations", "[", "idx", "-", "1", "]", "]", "for", "offset", "in", "range", "(", "0", ",", "self", ".", "window_length", "-", "1", ")", ":", "current_idx", "=", "idx", "-", "2", "-", "offset", "assert", "current_idx", ">=", "1", "current_terminal", "=", "self", ".", "terminals", "[", "current_idx", "-", "1", "]", "if", "current_terminal", "and", "not", "self", ".", "ignore_episode_boundaries", ":", "# The previously handled observation was terminal, don't add the current one.", "# Otherwise we would leak into a different episode.", "break", "state0", ".", "insert", "(", "0", ",", "self", ".", "observations", "[", "current_idx", "]", ")", "while", "len", "(", "state0", ")", "<", "self", ".", "window_length", ":", "state0", ".", "insert", "(", "0", ",", "zeroed_observation", "(", "state0", "[", "0", "]", ")", ")", "action", "=", "self", ".", "actions", "[", "idx", "-", "1", "]", "reward", "=", "self", ".", "rewards", "[", "idx", "-", "1", "]", "terminal1", "=", "self", ".", "terminals", "[", "idx", "-", "1", "]", "# Okay, now we need to create the follow-up state. This is state0 shifted on timestep", "# to the right. 
Again, we need to be careful to not include an observation from the next", "# episode if the last state is terminal.", "state1", "=", "[", "np", ".", "copy", "(", "x", ")", "for", "x", "in", "state0", "[", "1", ":", "]", "]", "state1", ".", "append", "(", "self", ".", "observations", "[", "idx", "]", ")", "assert", "len", "(", "state0", ")", "==", "self", ".", "window_length", "assert", "len", "(", "state1", ")", "==", "len", "(", "state0", ")", "experiences", ".", "append", "(", "Experience", "(", "state0", "=", "state0", ",", "action", "=", "action", ",", "reward", "=", "reward", ",", "state1", "=", "state1", ",", "terminal1", "=", "terminal1", ")", ")", "assert", "len", "(", "experiences", ")", "==", "batch_size", "return", "experiences" ]
Return a randomized batch of experiences # Argument batch_size (int): Size of the batch batch_idxs (list of int): Indexes to extract # Returns A list of experiences randomly selected
[ "Return", "a", "randomized", "batch", "of", "experiences" ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/memory.py#L171-L239
train
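A hedged end-to-end sketch of how `append` and `sample` fit together, assuming keras-rl is installed; the environment loop is simulated with random observations:

```python
import numpy as np
from rl.memory import SequentialMemory  # assumes keras-rl is installed

memory = SequentialMemory(limit=1000, window_length=4)

# Simulate a training loop: every 20th step ends an episode.
for step in range(200):
    obs = np.random.rand(4)
    memory.append(obs, action=0, reward=1.0, terminal=(step % 20 == 19))

batch = memory.sample(batch_size=8)
exp = batch[0]
# Each Experience holds window_length stacked observations on both sides.
assert len(exp.state0) == len(exp.state1) == 4
```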
keras-rl/keras-rl
rl/memory.py
SequentialMemory.append
```python
def append(self, observation, action, reward, terminal, training=True):
    """Append an observation to the memory

    # Argument
        observation (dict): Observation returned by environment
        action (int): Action taken to obtain this observation
        reward (float): Reward obtained by taking this action
        terminal (boolean): Is the state terminal
    """
    super(SequentialMemory, self).append(observation, action, reward, terminal, training=training)

    # This needs to be understood as follows: in `observation`, take `action`, obtain `reward`
    # and whether the next state is `terminal` or not.
    if training:
        self.observations.append(observation)
        self.actions.append(action)
        self.rewards.append(reward)
        self.terminals.append(terminal)
```
python
[ "def", "append", "(", "self", ",", "observation", ",", "action", ",", "reward", ",", "terminal", ",", "training", "=", "True", ")", ":", "super", "(", "SequentialMemory", ",", "self", ")", ".", "append", "(", "observation", ",", "action", ",", "reward", ",", "terminal", ",", "training", "=", "training", ")", "# This needs to be understood as follows: in `observation`, take `action`, obtain `reward`", "# and weather the next state is `terminal` or not.", "if", "training", ":", "self", ".", "observations", ".", "append", "(", "observation", ")", "self", ".", "actions", ".", "append", "(", "action", ")", "self", ".", "rewards", ".", "append", "(", "reward", ")", "self", ".", "terminals", ".", "append", "(", "terminal", ")" ]
Append an observation to the memory # Argument observation (dict): Observation returned by environment action (int): Action taken to obtain this observation reward (float): Reward obtained by taking this action terminal (boolean): Is the state terminal
[ "Append", "an", "observation", "to", "the", "memory" ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/memory.py#L241-L258
train
keras-rl/keras-rl
rl/memory.py
SequentialMemory.get_config
```python
def get_config(self):
    """Return configurations of SequentialMemory

    # Returns
        Dict of config
    """
    config = super(SequentialMemory, self).get_config()
    config['limit'] = self.limit
    return config
```
python
[ "def", "get_config", "(", "self", ")", ":", "config", "=", "super", "(", "SequentialMemory", ",", "self", ")", ".", "get_config", "(", ")", "config", "[", "'limit'", "]", "=", "self", ".", "limit", "return", "config" ]
Return configurations of SequentialMemory # Returns Dict of config
[ "Return", "configurations", "of", "SequentialMemory" ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/memory.py#L269-L277
train
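A short sketch of the serialized configuration; the base-class keys shown in the comment are assumptions about what `Memory.get_config` contributes:

```python
from rl.memory import SequentialMemory  # assumes keras-rl is installed

memory = SequentialMemory(limit=50000, window_length=4)
print(memory.get_config())
# Expected shape (assumption): {'window_length': 4,
#                               'ignore_episode_boundaries': False,
#                               'limit': 50000}
```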
keras-rl/keras-rl
rl/memory.py
EpisodeParameterMemory.sample
```python
def sample(self, batch_size, batch_idxs=None):
    """Return a randomized batch of params and rewards

    # Argument
        batch_size (int): Size of the batch
        batch_idxs (list of int): Indexes to extract

    # Returns
        A list of params randomly selected and a list of associated rewards
    """
    if batch_idxs is None:
        batch_idxs = sample_batch_indexes(0, self.nb_entries, size=batch_size)
    assert len(batch_idxs) == batch_size

    batch_params = []
    batch_total_rewards = []
    for idx in batch_idxs:
        batch_params.append(self.params[idx])
        batch_total_rewards.append(self.total_rewards[idx])
    return batch_params, batch_total_rewards
```
python
[ "def", "sample", "(", "self", ",", "batch_size", ",", "batch_idxs", "=", "None", ")", ":", "if", "batch_idxs", "is", "None", ":", "batch_idxs", "=", "sample_batch_indexes", "(", "0", ",", "self", ".", "nb_entries", ",", "size", "=", "batch_size", ")", "assert", "len", "(", "batch_idxs", ")", "==", "batch_size", "batch_params", "=", "[", "]", "batch_total_rewards", "=", "[", "]", "for", "idx", "in", "batch_idxs", ":", "batch_params", ".", "append", "(", "self", ".", "params", "[", "idx", "]", ")", "batch_total_rewards", ".", "append", "(", "self", ".", "total_rewards", "[", "idx", "]", ")", "return", "batch_params", ",", "batch_total_rewards" ]
Return a randomized batch of params and rewards # Argument batch_size (int): Size of the batch batch_idxs (list of int): Indexes to extract # Returns A list of params randomly selected and a list of associated rewards
[ "Return", "a", "randomized", "batch", "of", "params", "and", "rewards" ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/memory.py#L289-L307
train
keras-rl/keras-rl
rl/memory.py
EpisodeParameterMemory.append
```python
def append(self, observation, action, reward, terminal, training=True):
    """Append a reward to the memory

    # Argument
        observation (dict): Observation returned by environment
        action (int): Action taken to obtain this observation
        reward (float): Reward obtained by taking this action
        terminal (boolean): Is the state terminal
    """
    super(EpisodeParameterMemory, self).append(observation, action, reward, terminal, training=training)
    if training:
        self.intermediate_rewards.append(reward)
```
python
[ "def", "append", "(", "self", ",", "observation", ",", "action", ",", "reward", ",", "terminal", ",", "training", "=", "True", ")", ":", "super", "(", "EpisodeParameterMemory", ",", "self", ")", ".", "append", "(", "observation", ",", "action", ",", "reward", ",", "terminal", ",", "training", "=", "training", ")", "if", "training", ":", "self", ".", "intermediate_rewards", ".", "append", "(", "reward", ")" ]
Append a reward to the memory # Argument observation (dict): Observation returned by environment action (int): Action taken to obtain this observation reward (float): Reward obtained by taking this action terminal (boolean): Is the state terminal
[ "Append", "a", "reward", "to", "the", "memory" ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/memory.py#L309-L320
train
keras-rl/keras-rl
rl/memory.py
EpisodeParameterMemory.finalize_episode
```python
def finalize_episode(self, params):
    """Closes the current episode, sums up rewards and stores the parameters

    # Argument
        params (object): Parameters associated with the episode to be stored and then
            retrieved back in sample()
    """
    total_reward = sum(self.intermediate_rewards)
    self.total_rewards.append(total_reward)
    self.params.append(params)
    self.intermediate_rewards = []
```
python
[ "def", "finalize_episode", "(", "self", ",", "params", ")", ":", "total_reward", "=", "sum", "(", "self", ".", "intermediate_rewards", ")", "self", ".", "total_rewards", ".", "append", "(", "total_reward", ")", "self", ".", "params", ".", "append", "(", "params", ")", "self", ".", "intermediate_rewards", "=", "[", "]" ]
Closes the current episode, sums up rewards and stores the parameters # Argument params (object): Parameters associated with the episode to be stored and then retrieved back in sample()
[ "Closes", "the", "current", "episode", "sums", "up", "rewards", "and", "stores", "the", "parameters" ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/memory.py#L322-L331
train
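Putting the three `EpisodeParameterMemory` methods together, a minimal sketch of an episodic (e.g. CEM-style) loop, assuming keras-rl is installed; the parameter vectors are placeholders:

```python
import numpy as np
from rl.memory import EpisodeParameterMemory  # assumes keras-rl is installed

memory = EpisodeParameterMemory(limit=100, window_length=1)

for episode in range(5):
    for step in range(10):
        # append() accumulates per-step rewards in intermediate_rewards
        memory.append(np.zeros(2), action=0, reward=float(step), terminal=(step == 9))
    # finalize_episode() sums them and stores the episode's parameters
    memory.finalize_episode(params=np.random.rand(8))

params, total_rewards = memory.sample(batch_size=3)
print(total_rewards)  # three episode returns, each 45.0 here
```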
keras-rl/keras-rl
rl/common/cmd_util.py
make_gym_env
```python
def make_gym_env(env_id, num_env=2, seed=123, wrapper_kwargs=None, start_index=0):
    """
    Create a wrapped, SubprocVecEnv for Gym Environments.
    """
    if wrapper_kwargs is None:
        wrapper_kwargs = {}

    def make_env(rank):  # pylint: disable=C0111
        def _thunk():
            env = gym.make(env_id)
            env.seed(seed + rank)
            return env
        return _thunk

    set_global_seeds(seed)
    return SubprocVecEnv([make_env(i + start_index) for i in range(num_env)])
```
python
[ "def", "make_gym_env", "(", "env_id", ",", "num_env", "=", "2", ",", "seed", "=", "123", ",", "wrapper_kwargs", "=", "None", ",", "start_index", "=", "0", ")", ":", "if", "wrapper_kwargs", "is", "None", ":", "wrapper_kwargs", "=", "{", "}", "def", "make_env", "(", "rank", ")", ":", "# pylint: disable=C0111", "def", "_thunk", "(", ")", ":", "env", "=", "gym", ".", "make", "(", "env_id", ")", "env", ".", "seed", "(", "seed", "+", "rank", ")", "return", "env", "return", "_thunk", "set_global_seeds", "(", "seed", ")", "return", "SubprocVecEnv", "(", "[", "make_env", "(", "i", "+", "start_index", ")", "for", "i", "in", "range", "(", "num_env", ")", "]", ")" ]
Create a wrapped, SubprocVecEnv for Gym Environments.
[ "Create", "a", "wrapped", "SubprocVecEnv", "for", "Gym", "Environments", "." ]
e6efb0d8297ec38d704a3110b5d6ed74d09a05e3
https://github.com/keras-rl/keras-rl/blob/e6efb0d8297ec38d704a3110b5d6ed74d09a05e3/rl/common/cmd_util.py#L7-L22
train
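A hedged usage sketch; it assumes `gym` and the `SubprocVecEnv` wrapper are importable and that the machine can spawn subprocesses:

```python
# Each sub-process is seeded with seed + rank, so the four environments are
# deterministic but not identical.
venv = make_gym_env("CartPole-v1", num_env=4, seed=0)
obs = venv.reset()   # batched reset: one observation per sub-process
venv.close()
```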
awslabs/aws-sam-cli
samcli/commands/local/cli_common/options.py
invoke_common_options
```python
def invoke_common_options(f):
    """
    Common CLI options shared by "local invoke" and "local start-api" commands

    :param f: Callback passed by Click
    """
    invoke_options = [
        template_click_option(),

        click.option('--env-vars', '-n',
                     type=click.Path(exists=True),
                     help="JSON file containing values for Lambda function's environment variables."),

        parameter_override_click_option(),

        click.option('--debug-port', '-d',
                     help="When specified, Lambda function container will start in debug mode and will expose this "
                          "port on localhost.",
                     envvar="SAM_DEBUG_PORT"),

        click.option('--debugger-path',
                     help="Host path to a debugger that will be mounted into the Lambda container."),

        click.option('--debug-args',
                     help="Additional arguments to be passed to the debugger.",
                     envvar="DEBUGGER_ARGS"),

        click.option('--docker-volume-basedir', '-v',
                     envvar="SAM_DOCKER_VOLUME_BASEDIR",
                     help="Specifies the location basedir where the SAM file exists. If Docker is running on "
                          "a remote machine, you must mount the path where the SAM file exists on the docker machine "
                          "and modify this value to match the remote machine."),

        click.option('--log-file', '-l',
                     help="logfile to send runtime logs to."),

        click.option('--layer-cache-basedir',
                     type=click.Path(exists=False, file_okay=False),
                     envvar="SAM_LAYER_CACHE_BASEDIR",
                     help="Specifies the location basedir where the Layers your template uses will be downloaded to.",
                     default=get_default_layer_cache_dir()),
    ] + docker_click_options() + [

        click.option('--force-image-build',
                     is_flag=True,
                     help='Specify whether CLI should rebuild the image used for invoking functions with layers.',
                     envvar='SAM_FORCE_IMAGE_BUILD',
                     default=False),
    ]

    # Reverse the list to maintain ordering of options in help text printed with --help
    for option in reversed(invoke_options):
        option(f)

    return f
```
python
[ "def", "invoke_common_options", "(", "f", ")", ":", "invoke_options", "=", "[", "template_click_option", "(", ")", ",", "click", ".", "option", "(", "'--env-vars'", ",", "'-n'", ",", "type", "=", "click", ".", "Path", "(", "exists", "=", "True", ")", ",", "help", "=", "\"JSON file containing values for Lambda function's environment variables.\"", ")", ",", "parameter_override_click_option", "(", ")", ",", "click", ".", "option", "(", "'--debug-port'", ",", "'-d'", ",", "help", "=", "\"When specified, Lambda function container will start in debug mode and will expose this \"", "\"port on localhost.\"", ",", "envvar", "=", "\"SAM_DEBUG_PORT\"", ")", ",", "click", ".", "option", "(", "'--debugger-path'", ",", "help", "=", "\"Host path to a debugger that will be mounted into the Lambda container.\"", ")", ",", "click", ".", "option", "(", "'--debug-args'", ",", "help", "=", "\"Additional arguments to be passed to the debugger.\"", ",", "envvar", "=", "\"DEBUGGER_ARGS\"", ")", ",", "click", ".", "option", "(", "'--docker-volume-basedir'", ",", "'-v'", ",", "envvar", "=", "\"SAM_DOCKER_VOLUME_BASEDIR\"", ",", "help", "=", "\"Specifies the location basedir where the SAM file exists. If the Docker is running on \"", "\"a remote machine, you must mount the path where the SAM file exists on the docker machine \"", "\"and modify this value to match the remote machine.\"", ")", ",", "click", ".", "option", "(", "'--log-file'", ",", "'-l'", ",", "help", "=", "\"logfile to send runtime logs to.\"", ")", ",", "click", ".", "option", "(", "'--layer-cache-basedir'", ",", "type", "=", "click", ".", "Path", "(", "exists", "=", "False", ",", "file_okay", "=", "False", ")", ",", "envvar", "=", "\"SAM_LAYER_CACHE_BASEDIR\"", ",", "help", "=", "\"Specifies the location basedir where the Layers your template uses will be downloaded to.\"", ",", "default", "=", "get_default_layer_cache_dir", "(", ")", ")", ",", "]", "+", "docker_click_options", "(", ")", "+", "[", "click", ".", "option", "(", "'--force-image-build'", ",", "is_flag", "=", "True", ",", "help", "=", "'Specify whether CLI should rebuild the image used for invoking functions with layers.'", ",", "envvar", "=", "'SAM_FORCE_IMAGE_BUILD'", ",", "default", "=", "False", ")", ",", "]", "# Reverse the list to maintain ordering of options in help text printed with --help", "for", "option", "in", "reversed", "(", "invoke_options", ")", ":", "option", "(", "f", ")", "return", "f" ]
Common CLI options shared by "local invoke" and "local start-api" commands :param f: Callback passed by Click
[ "Common", "CLI", "options", "shared", "by", "local", "invoke", "and", "local", "start", "-", "api", "commands" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/options.py#L73-L130
train
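A sketch of how a command might consume the shared options; `do_cli` is a hypothetical Click callback for illustration, not part of SAM CLI:

```python
import click

@click.command()
@invoke_common_options  # attaches --template, --env-vars, --debug-port, ...
def do_cli(**kwargs):
    # Because the decorator applies the options in reversed() order,
    # --help lists them in the order they appear in invoke_options.
    click.echo(sorted(kwargs))
```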
awslabs/aws-sam-cli
samcli/commands/_utils/options.py
get_or_default_template_file_name
```python
def get_or_default_template_file_name(ctx, param, provided_value, include_build):
    """
    Default value for the template file name option is more complex than what Click can handle.
    This method either returns the user-provided file name or one of the two default options
    (template.yaml/template.yml) depending on the file that exists

    :param ctx: Click Context
    :param param: Param name
    :param provided_value: Value provided by Click. It could either be the default value or provided by user.
    :return: Actual value to be used in the CLI
    """
    search_paths = [
        "template.yaml",
        "template.yml",
    ]

    if include_build:
        search_paths.insert(0, os.path.join(".aws-sam", "build", "template.yaml"))

    if provided_value == _TEMPLATE_OPTION_DEFAULT_VALUE:
        # Default value was used. Value can either be template.yaml or template.yml.
        # Decide based on which file exists. .yml is the default, even if it does not exist.
        provided_value = "template.yml"

        for option in search_paths:
            if os.path.exists(option):
                provided_value = option
                break

    result = os.path.abspath(provided_value)
    LOG.debug("Using SAM Template at %s", result)
    return result
```
python
[ "def", "get_or_default_template_file_name", "(", "ctx", ",", "param", ",", "provided_value", ",", "include_build", ")", ":", "search_paths", "=", "[", "\"template.yaml\"", ",", "\"template.yml\"", ",", "]", "if", "include_build", ":", "search_paths", ".", "insert", "(", "0", ",", "os", ".", "path", ".", "join", "(", "\".aws-sam\"", ",", "\"build\"", ",", "\"template.yaml\"", ")", ")", "if", "provided_value", "==", "_TEMPLATE_OPTION_DEFAULT_VALUE", ":", "# Default value was used. Value can either be template.yaml or template.yml. Decide based on which file exists", "# .yml is the default, even if it does not exist.", "provided_value", "=", "\"template.yml\"", "for", "option", "in", "search_paths", ":", "if", "os", ".", "path", ".", "exists", "(", "option", ")", ":", "provided_value", "=", "option", "break", "result", "=", "os", ".", "path", ".", "abspath", "(", "provided_value", ")", "LOG", ".", "debug", "(", "\"Using SAM Template at %s\"", ",", "result", ")", "return", "result" ]
Default value for the template file name option is more complex than what Click can handle. This method either returns user provided file name or one of the two default options (template.yaml/template.yml) depending on the file that exists :param ctx: Click Context :param param: Param name :param provided_value: Value provided by Click. It could either be the default value or provided by user. :return: Actual value to be used in the CLI
[ "Default", "value", "for", "the", "template", "file", "name", "option", "is", "more", "complex", "than", "what", "Click", "can", "handle", ".", "This", "method", "either", "returns", "user", "provided", "file", "name", "or", "one", "of", "the", "two", "default", "options", "(", "template", ".", "yaml", "/", "template", ".", "yml", ")", "depending", "on", "the", "file", "that", "exists" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/options.py#L18-L50
train
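The resolution order can be summarized with a hedged sketch; `_TEMPLATE_OPTION_DEFAULT_VALUE` is the module's sentinel default, and the direct call below mimics what Click would do (the function ignores `ctx` and `param`):

```python
# When the user passed no --template, the callback probes, in order:
#   .aws-sam/build/template.yaml   (only when include_build=True)
#   template.yaml
#   template.yml                   (fallback even if the file is absent)
# The first existing path wins and is returned as an absolute path.
resolved = get_or_default_template_file_name(
    ctx=None, param=None,
    provided_value=_TEMPLATE_OPTION_DEFAULT_VALUE,
    include_build=True)
print(resolved)
```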
awslabs/aws-sam-cli
samcli/commands/_utils/options.py
template_click_option
```python
def template_click_option(include_build=True):
    """
    Click Option for template option
    """
    return click.option('--template', '-t',
                        default=_TEMPLATE_OPTION_DEFAULT_VALUE,
                        type=click.Path(),
                        envvar="SAM_TEMPLATE_FILE",
                        callback=partial(get_or_default_template_file_name, include_build=include_build),
                        show_default=True,
                        help="AWS SAM template file")
```
python
[ "def", "template_click_option", "(", "include_build", "=", "True", ")", ":", "return", "click", ".", "option", "(", "'--template'", ",", "'-t'", ",", "default", "=", "_TEMPLATE_OPTION_DEFAULT_VALUE", ",", "type", "=", "click", ".", "Path", "(", ")", ",", "envvar", "=", "\"SAM_TEMPLATE_FILE\"", ",", "callback", "=", "partial", "(", "get_or_default_template_file_name", ",", "include_build", "=", "include_build", ")", ",", "show_default", "=", "True", ",", "help", "=", "\"AWS SAM template file\"", ")" ]
Click Option for template option
[ "Click", "Option", "for", "template", "option" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/options.py#L73-L83
train
awslabs/aws-sam-cli
samcli/lib/utils/tar.py
create_tarball
```python
def create_tarball(tar_paths):
    """
    Context Manager that creates the tarball of the Docker Context to use for building the image

    Parameters
    ----------
    tar_paths dict(str, str)
        Key representing a full path to the file or directory and the Value representing the path within the tarball

    Yields
    ------
    The tarball file
    """
    tarballfile = TemporaryFile()

    with tarfile.open(fileobj=tarballfile, mode='w') as archive:
        for path_on_system, path_in_tarball in tar_paths.items():
            archive.add(path_on_system, arcname=path_in_tarball)

    # Flush and seek to the beginning of the file
    tarballfile.flush()
    tarballfile.seek(0)

    try:
        yield tarballfile
    finally:
        tarballfile.close()
```
python
[ "def", "create_tarball", "(", "tar_paths", ")", ":", "tarballfile", "=", "TemporaryFile", "(", ")", "with", "tarfile", ".", "open", "(", "fileobj", "=", "tarballfile", ",", "mode", "=", "'w'", ")", "as", "archive", ":", "for", "path_on_system", ",", "path_in_tarball", "in", "tar_paths", ".", "items", "(", ")", ":", "archive", ".", "add", "(", "path_on_system", ",", "arcname", "=", "path_in_tarball", ")", "# Flush are seek to the beginning of the file", "tarballfile", ".", "flush", "(", ")", "tarballfile", ".", "seek", "(", "0", ")", "try", ":", "yield", "tarballfile", "finally", ":", "tarballfile", ".", "close", "(", ")" ]
Context Manager that creates the tarball of the Docker Context to use for building the image Parameters ---------- tar_paths dict(str, str) Key representing a full path to the file or directory and the Value representing the path within the tarball Yields ------ The tarball file
[ "Context", "Manger", "that", "creates", "the", "tarball", "of", "the", "Docker", "Context", "to", "use", "for", "building", "the", "image" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/tar.py#L11-L37
train
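A hedged usage sketch; `create_tarball` is a generator-based context manager, so it is consumed with `with` (the example path `/tmp/app.py` is an assumption and must exist on disk):

```python
# The mapping key is the path on disk; the value is the name inside the tarball.
with create_tarball({"/tmp/app.py": "app.py"}) as tarball:
    docker_context = tarball.read()  # raw tarball bytes, e.g. for a Docker build context
print(len(docker_context))
```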
awslabs/aws-sam-cli
samcli/commands/local/lib/local_lambda_service.py
LocalLambdaService.start
```python
def start(self):
    """
    Creates and starts the Local Lambda Invoke service. This method will block until the service is stopped
    manually using an interrupt. After the service is started, callers can make HTTP requests to the endpoint
    to invoke the Lambda function and receive a response.

    NOTE: This is a blocking call that will not return until the thread is interrupted with SIGINT/SIGTERM
    """
    # We care about passing only stderr to the Service and not stdout because stdout from Docker container
    # contains the response to the API which is sent out as HTTP response. Only stderr needs to be printed
    # to the console or a log file. stderr from Docker container contains runtime logs and output of print
    # statements from the Lambda function
    service = LocalLambdaInvokeService(lambda_runner=self.lambda_runner,
                                       port=self.port,
                                       host=self.host,
                                       stderr=self.stderr_stream)

    service.create()

    LOG.info("Starting the Local Lambda Service. You can now invoke your Lambda Functions defined in your template"
             " through the endpoint.")

    service.run()
```
python
[ "def", "start", "(", "self", ")", ":", "# We care about passing only stderr to the Service and not stdout because stdout from Docker container", "# contains the response to the API which is sent out as HTTP response. Only stderr needs to be printed", "# to the console or a log file. stderr from Docker container contains runtime logs and output of print", "# statements from the Lambda function", "service", "=", "LocalLambdaInvokeService", "(", "lambda_runner", "=", "self", ".", "lambda_runner", ",", "port", "=", "self", ".", "port", ",", "host", "=", "self", ".", "host", ",", "stderr", "=", "self", ".", "stderr_stream", ")", "service", ".", "create", "(", ")", "LOG", ".", "info", "(", "\"Starting the Local Lambda Service. You can now invoke your Lambda Functions defined in your template\"", "\" through the endpoint.\"", ")", "service", ".", "run", "(", ")" ]
Creates and starts the Local Lambda Invoke service. This method will block until the service is stopped manually using an interrupt. After the service is started, callers can make HTTP requests to the endpoint to invoke the Lambda function and receive a response. NOTE: This is a blocking call that will not return until the thread is interrupted with SIGINT/SIGTERM
[ "Creates", "and", "starts", "the", "Local", "Lambda", "Invoke", "service", ".", "This", "method", "will", "block", "until", "the", "service", "is", "stopped", "manually", "using", "an", "interrupt", ".", "After", "the", "service", "is", "started", "callers", "can", "make", "HTTP", "requests", "to", "the", "endpoint", "to", "invoke", "the", "Lambda", "function", "and", "receive", "a", "response", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/local_lambda_service.py#L35-L58
train
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_function_provider.py
SamFunctionProvider._extract_functions
```python
def _extract_functions(resources):
    """
    Extracts and returns function information from the given dictionary of SAM/CloudFormation resources. This
    method supports functions defined with AWS::Serverless::Function and AWS::Lambda::Function

    :param dict resources: Dictionary of SAM/CloudFormation resources
    :return dict(string : samcli.commands.local.lib.provider.Function): Dictionary of function LogicalId to the
        Function configuration object
    """
    result = {}

    for name, resource in resources.items():

        resource_type = resource.get("Type")
        resource_properties = resource.get("Properties", {})

        if resource_type == SamFunctionProvider._SERVERLESS_FUNCTION:
            layers = SamFunctionProvider._parse_layer_info(resource_properties.get("Layers", []), resources)
            result[name] = SamFunctionProvider._convert_sam_function_resource(name, resource_properties, layers)

        elif resource_type == SamFunctionProvider._LAMBDA_FUNCTION:
            layers = SamFunctionProvider._parse_layer_info(resource_properties.get("Layers", []), resources)
            result[name] = SamFunctionProvider._convert_lambda_function_resource(name, resource_properties, layers)

        # We don't care about other resource types. Just ignore them

    return result
```
python
[ "def", "_extract_functions", "(", "resources", ")", ":", "result", "=", "{", "}", "for", "name", ",", "resource", "in", "resources", ".", "items", "(", ")", ":", "resource_type", "=", "resource", ".", "get", "(", "\"Type\"", ")", "resource_properties", "=", "resource", ".", "get", "(", "\"Properties\"", ",", "{", "}", ")", "if", "resource_type", "==", "SamFunctionProvider", ".", "_SERVERLESS_FUNCTION", ":", "layers", "=", "SamFunctionProvider", ".", "_parse_layer_info", "(", "resource_properties", ".", "get", "(", "\"Layers\"", ",", "[", "]", ")", ",", "resources", ")", "result", "[", "name", "]", "=", "SamFunctionProvider", ".", "_convert_sam_function_resource", "(", "name", ",", "resource_properties", ",", "layers", ")", "elif", "resource_type", "==", "SamFunctionProvider", ".", "_LAMBDA_FUNCTION", ":", "layers", "=", "SamFunctionProvider", ".", "_parse_layer_info", "(", "resource_properties", ".", "get", "(", "\"Layers\"", ",", "[", "]", ")", ",", "resources", ")", "result", "[", "name", "]", "=", "SamFunctionProvider", ".", "_convert_lambda_function_resource", "(", "name", ",", "resource_properties", ",", "layers", ")", "# We don't care about other resource types. Just ignore them", "return", "result" ]
Extracts and returns function information from the given dictionary of SAM/CloudFormation resources. This method supports functions defined with AWS::Serverless::Function and AWS::Lambda::Function :param dict resources: Dictionary of SAM/CloudFormation resources :return dict(string : samcli.commands.local.lib.provider.Function): Dictionary of function LogicalId to the Function configuration object
[ "Extracts", "and", "returns", "function", "information", "from", "the", "given", "dictionary", "of", "SAM", "/", "CloudFormation", "resources", ".", "This", "method", "supports", "functions", "defined", "with", "AWS", "::", "Serverless", "::", "Function", "and", "AWS", "::", "Lambda", "::", "Function" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_function_provider.py#L81-L108
train
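A hedged sketch of the input shape this method expects; calling the private static method directly is for illustration only, and the resource names and properties are made up:

```python
from samcli.commands.local.lib.sam_function_provider import SamFunctionProvider

resources = {
    "HelloWorldFunction": {
        "Type": "AWS::Serverless::Function",
        "Properties": {"Runtime": "python3.7", "Handler": "app.handler", "CodeUri": "hello/"},
    },
    # Non-function resources are ignored by _extract_functions.
    "MyTable": {"Type": "AWS::DynamoDB::Table", "Properties": {}},
}

functions = SamFunctionProvider._extract_functions(resources)
print(list(functions))  # ['HelloWorldFunction']
```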
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_function_provider.py
SamFunctionProvider._convert_sam_function_resource
```python
def _convert_sam_function_resource(name, resource_properties, layers):
    """
    Converts an AWS::Serverless::Function resource to a Function configuration usable by the provider.

    :param string name: LogicalID of the resource NOTE: This is *not* the function name because not all functions
        declare a name
    :param dict resource_properties: Properties of this resource
    :return samcli.commands.local.lib.provider.Function: Function configuration
    """
    codeuri = SamFunctionProvider._extract_sam_function_codeuri(name, resource_properties, "CodeUri")

    LOG.debug("Found Serverless function with name='%s' and CodeUri='%s'", name, codeuri)

    return Function(
        name=name,
        runtime=resource_properties.get("Runtime"),
        memory=resource_properties.get("MemorySize"),
        timeout=resource_properties.get("Timeout"),
        handler=resource_properties.get("Handler"),
        codeuri=codeuri,
        environment=resource_properties.get("Environment"),
        rolearn=resource_properties.get("Role"),
        layers=layers
    )
```
python
[ "def", "_convert_sam_function_resource", "(", "name", ",", "resource_properties", ",", "layers", ")", ":", "codeuri", "=", "SamFunctionProvider", ".", "_extract_sam_function_codeuri", "(", "name", ",", "resource_properties", ",", "\"CodeUri\"", ")", "LOG", ".", "debug", "(", "\"Found Serverless function with name='%s' and CodeUri='%s'\"", ",", "name", ",", "codeuri", ")", "return", "Function", "(", "name", "=", "name", ",", "runtime", "=", "resource_properties", ".", "get", "(", "\"Runtime\"", ")", ",", "memory", "=", "resource_properties", ".", "get", "(", "\"MemorySize\"", ")", ",", "timeout", "=", "resource_properties", ".", "get", "(", "\"Timeout\"", ")", ",", "handler", "=", "resource_properties", ".", "get", "(", "\"Handler\"", ")", ",", "codeuri", "=", "codeuri", ",", "environment", "=", "resource_properties", ".", "get", "(", "\"Environment\"", ")", ",", "rolearn", "=", "resource_properties", ".", "get", "(", "\"Role\"", ")", ",", "layers", "=", "layers", ")" ]
Converts an AWS::Serverless::Function resource to a Function configuration usable by the provider. :param string name: LogicalID of the resource NOTE: This is *not* the function name because not all functions declare a name :param dict resource_properties: Properties of this resource :return samcli.commands.local.lib.provider.Function: Function configuration
[ "Converts", "a", "AWS", "::", "Serverless", "::", "Function", "resource", "to", "a", "Function", "configuration", "usable", "by", "the", "provider", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_function_provider.py#L111-L135
train
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_function_provider.py
SamFunctionProvider._extract_sam_function_codeuri
```python
def _extract_sam_function_codeuri(name, resource_properties, code_property_key):
    """
    Extracts the SAM Function CodeUri from the Resource Properties

    Parameters
    ----------
    name str
        LogicalId of the resource
    resource_properties dict
        Dictionary representing the Properties of the Resource
    code_property_key str
        Property Key of the code on the Resource

    Returns
    -------
    str
        Representing the local code path
    """
    codeuri = resource_properties.get(code_property_key, SamFunctionProvider._DEFAULT_CODEURI)
    # CodeUri can be a dictionary of S3 Bucket/Key or an S3 URI, neither of which is supported
    if isinstance(codeuri, dict) or \
            (isinstance(codeuri, six.string_types) and codeuri.startswith("s3://")):
        codeuri = SamFunctionProvider._DEFAULT_CODEURI
        LOG.warning("Lambda function '%s' has specified S3 location for CodeUri which is unsupported. "
                    "Using default value of '%s' instead", name, codeuri)
    return codeuri
```
python
[ "def", "_extract_sam_function_codeuri", "(", "name", ",", "resource_properties", ",", "code_property_key", ")", ":", "codeuri", "=", "resource_properties", ".", "get", "(", "code_property_key", ",", "SamFunctionProvider", ".", "_DEFAULT_CODEURI", ")", "# CodeUri can be a dictionary of S3 Bucket/Key or a S3 URI, neither of which are supported", "if", "isinstance", "(", "codeuri", ",", "dict", ")", "or", "(", "isinstance", "(", "codeuri", ",", "six", ".", "string_types", ")", "and", "codeuri", ".", "startswith", "(", "\"s3://\"", ")", ")", ":", "codeuri", "=", "SamFunctionProvider", ".", "_DEFAULT_CODEURI", "LOG", ".", "warning", "(", "\"Lambda function '%s' has specified S3 location for CodeUri which is unsupported. \"", "\"Using default value of '%s' instead\"", ",", "name", ",", "codeuri", ")", "return", "codeuri" ]
Extracts the SAM Function CodeUri from the Resource Properties Parameters ---------- name str LogicalId of the resource resource_properties dict Dictionary representing the Properties of the Resource code_property_key str Property Key of the code on the Resource Returns ------- str Representing the local code path
[ "Extracts", "the", "SAM", "Function", "CodeUri", "from", "the", "Resource", "Properties" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_function_provider.py#L138-L163
train
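A hedged sketch of the fallback behaviour; again the private method is called directly for illustration, and the bucket/key values are made up:

```python
from samcli.commands.local.lib.sam_function_provider import SamFunctionProvider

local_props = {"CodeUri": "hello_world/"}
s3_props = {"CodeUri": "s3://my-bucket/my-key"}  # unsupported, falls back

print(SamFunctionProvider._extract_sam_function_codeuri("Fn", local_props, "CodeUri"))
# hello_world/
print(SamFunctionProvider._extract_sam_function_codeuri("Fn", s3_props, "CodeUri"))
# the default CodeUri (a warning is logged)
```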
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_function_provider.py
SamFunctionProvider._convert_lambda_function_resource
```python
def _convert_lambda_function_resource(name, resource_properties, layers):  # pylint: disable=invalid-name
    """
    Converts an AWS::Lambda::Function resource to a Function configuration usable by the provider.

    :param string name: LogicalID of the resource NOTE: This is *not* the function name because not all functions
        declare a name
    :param dict resource_properties: Properties of this resource
    :return samcli.commands.local.lib.provider.Function: Function configuration
    """

    # CodeUri is set to "." in order to get code locally from current directory. AWS::Lambda::Function's ``Code``
    # property does not support specifying a local path
    codeuri = SamFunctionProvider._extract_lambda_function_code(resource_properties, "Code")

    LOG.debug("Found Lambda function with name='%s' and CodeUri='%s'", name, codeuri)

    return Function(
        name=name,
        runtime=resource_properties.get("Runtime"),
        memory=resource_properties.get("MemorySize"),
        timeout=resource_properties.get("Timeout"),
        handler=resource_properties.get("Handler"),
        codeuri=codeuri,
        environment=resource_properties.get("Environment"),
        rolearn=resource_properties.get("Role"),
        layers=layers
    )
```
python
def _convert_lambda_function_resource(name, resource_properties, layers):  # pylint: disable=invalid-name
        """
        Converts an AWS::Lambda::Function resource to a Function configuration usable by the provider.

        :param string name: LogicalID of the resource NOTE: This is *not* the function name because not all functions declare a name
        :param dict resource_properties: Properties of this resource
        :return samcli.commands.local.lib.provider.Function: Function configuration
        """

        # CodeUri is set to "." in order to get code locally from current directory. AWS::Lambda::Function's ``Code``
        # property does not support specifying a local path
        codeuri = SamFunctionProvider._extract_lambda_function_code(resource_properties, "Code")

        LOG.debug("Found Lambda function with name='%s' and CodeUri='%s'", name, codeuri)

        return Function(
            name=name,
            runtime=resource_properties.get("Runtime"),
            memory=resource_properties.get("MemorySize"),
            timeout=resource_properties.get("Timeout"),
            handler=resource_properties.get("Handler"),
            codeuri=codeuri,
            environment=resource_properties.get("Environment"),
            rolearn=resource_properties.get("Role"),
            layers=layers
        )
[ "def", "_convert_lambda_function_resource", "(", "name", ",", "resource_properties", ",", "layers", ")", ":", "# pylint: disable=invalid-name", "# CodeUri is set to \".\" in order to get code locally from current directory. AWS::Lambda::Function's ``Code``", "# property does not support specifying a local path", "codeuri", "=", "SamFunctionProvider", ".", "_extract_lambda_function_code", "(", "resource_properties", ",", "\"Code\"", ")", "LOG", ".", "debug", "(", "\"Found Lambda function with name='%s' and CodeUri='%s'\"", ",", "name", ",", "codeuri", ")", "return", "Function", "(", "name", "=", "name", ",", "runtime", "=", "resource_properties", ".", "get", "(", "\"Runtime\"", ")", ",", "memory", "=", "resource_properties", ".", "get", "(", "\"MemorySize\"", ")", ",", "timeout", "=", "resource_properties", ".", "get", "(", "\"Timeout\"", ")", ",", "handler", "=", "resource_properties", ".", "get", "(", "\"Handler\"", ")", ",", "codeuri", "=", "codeuri", ",", "environment", "=", "resource_properties", ".", "get", "(", "\"Environment\"", ")", ",", "rolearn", "=", "resource_properties", ".", "get", "(", "\"Role\"", ")", ",", "layers", "=", "layers", ")" ]
Converts an AWS::Lambda::Function resource to a Function configuration usable by the provider.

:param string name: LogicalID of the resource NOTE: This is *not* the function name because not all functions declare a name
:param dict resource_properties: Properties of this resource
:return samcli.commands.local.lib.provider.Function: Function configuration
[ "Converts", "a", "AWS", "::", "Serverless", "::", "Function", "resource", "to", "a", "Function", "configuration", "usable", "by", "the", "provider", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_function_provider.py#L166-L192
train
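For illustration, here is roughly how those Properties map onto a Function object. The Function namedtuple below is a hypothetical stand-in for samcli.commands.local.lib.provider.Function, with fields inferred from the keyword arguments in the record above:

from collections import namedtuple

# Stand-in for the real provider.Function class (field names assumed)
Function = namedtuple("Function", "name runtime memory timeout handler codeuri environment rolearn layers")

props = {"Runtime": "python3.6", "MemorySize": 128, "Timeout": 3, "Handler": "app.handler"}
fn = Function(name="HelloWorld",
              runtime=props.get("Runtime"),
              memory=props.get("MemorySize"),
              timeout=props.get("Timeout"),
              handler=props.get("Handler"),
              codeuri=".",                       # local fallback, as in the record above
              environment=props.get("Environment"),
              rolearn=props.get("Role"),
              layers=[])
print(fn.runtime)  # python3.6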
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_function_provider.py
SamFunctionProvider._extract_lambda_function_code
def _extract_lambda_function_code(resource_properties, code_property_key): """ Extracts the Lambda Function Code from the Resource Properties Parameters ---------- resource_properties dict Dictionary representing the Properties of the Resource code_property_key str Property Key of the code on the Resource Returns ------- str Representing the local code path """ codeuri = resource_properties.get(code_property_key, SamFunctionProvider._DEFAULT_CODEURI) if isinstance(codeuri, dict): codeuri = SamFunctionProvider._DEFAULT_CODEURI return codeuri
python
def _extract_lambda_function_code(resource_properties, code_property_key): """ Extracts the Lambda Function Code from the Resource Properties Parameters ---------- resource_properties dict Dictionary representing the Properties of the Resource code_property_key str Property Key of the code on the Resource Returns ------- str Representing the local code path """ codeuri = resource_properties.get(code_property_key, SamFunctionProvider._DEFAULT_CODEURI) if isinstance(codeuri, dict): codeuri = SamFunctionProvider._DEFAULT_CODEURI return codeuri
[ "def", "_extract_lambda_function_code", "(", "resource_properties", ",", "code_property_key", ")", ":", "codeuri", "=", "resource_properties", ".", "get", "(", "code_property_key", ",", "SamFunctionProvider", ".", "_DEFAULT_CODEURI", ")", "if", "isinstance", "(", "codeuri", ",", "dict", ")", ":", "codeuri", "=", "SamFunctionProvider", ".", "_DEFAULT_CODEURI", "return", "codeuri" ]
Extracts the Lambda Function Code from the Resource Properties Parameters ---------- resource_properties dict Dictionary representing the Properties of the Resource code_property_key str Property Key of the code on the Resource Returns ------- str Representing the local code path
[ "Extracts", "the", "Lambda", "Function", "Code", "from", "the", "Resource", "Properties" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_function_provider.py#L195-L217
train
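A quick sketch of how this differs from the SAM CodeUri variant: AWS::Lambda::Function's Code is a dictionary when it points at S3, so only the dict check is needed (the "." default is assumed to match _DEFAULT_CODEURI):

props = {"Code": {"S3Bucket": "my-bucket", "S3Key": "my-key"}}  # remote code location
codeuri = props.get("Code", ".")
if isinstance(codeuri, dict):
    codeuri = "."  # unlike CodeUri, there is no s3:// string form to check here
print(codeuri)  # "."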
awslabs/aws-sam-cli
samcli/commands/local/lib/sam_function_provider.py
SamFunctionProvider._parse_layer_info
def _parse_layer_info(list_of_layers, resources): """ Creates a list of Layer objects that are represented by the resources and the list of layers Parameters ---------- list_of_layers List(str) List of layers that are defined within the Layers Property on a function resources dict The Resources dictionary defined in a template Returns ------- List(samcli.commands.local.lib.provider.Layer) List of the Layer objects created from the template and layer list defined on the function. The order of the layers does not change. I.E: list_of_layers = ["layer1", "layer2"] the return would be [Layer("layer1"), Layer("layer2")] """ layers = [] for layer in list_of_layers: # If the layer is a string, assume it is the arn if isinstance(layer, six.string_types): layers.append(LayerVersion(layer, None)) continue # In the list of layers that is defined within a template, you can reference a LayerVersion resource. # When running locally, we need to follow that Ref so we can extract the local path to the layer code. if isinstance(layer, dict) and layer.get("Ref"): layer_logical_id = layer.get("Ref") layer_resource = resources.get(layer_logical_id) if not layer_resource or \ layer_resource.get("Type", "") not in (SamFunctionProvider._SERVERLESS_LAYER, SamFunctionProvider._LAMBDA_LAYER): raise InvalidLayerReference() layer_properties = layer_resource.get("Properties", {}) resource_type = layer_resource.get("Type") codeuri = None if resource_type == SamFunctionProvider._LAMBDA_LAYER: codeuri = SamFunctionProvider._extract_lambda_function_code(layer_properties, "Content") if resource_type == SamFunctionProvider._SERVERLESS_LAYER: codeuri = SamFunctionProvider._extract_sam_function_codeuri(layer_logical_id, layer_properties, "ContentUri") layers.append(LayerVersion(layer_logical_id, codeuri)) return layers
python
def _parse_layer_info(list_of_layers, resources): """ Creates a list of Layer objects that are represented by the resources and the list of layers Parameters ---------- list_of_layers List(str) List of layers that are defined within the Layers Property on a function resources dict The Resources dictionary defined in a template Returns ------- List(samcli.commands.local.lib.provider.Layer) List of the Layer objects created from the template and layer list defined on the function. The order of the layers does not change. I.E: list_of_layers = ["layer1", "layer2"] the return would be [Layer("layer1"), Layer("layer2")] """ layers = [] for layer in list_of_layers: # If the layer is a string, assume it is the arn if isinstance(layer, six.string_types): layers.append(LayerVersion(layer, None)) continue # In the list of layers that is defined within a template, you can reference a LayerVersion resource. # When running locally, we need to follow that Ref so we can extract the local path to the layer code. if isinstance(layer, dict) and layer.get("Ref"): layer_logical_id = layer.get("Ref") layer_resource = resources.get(layer_logical_id) if not layer_resource or \ layer_resource.get("Type", "") not in (SamFunctionProvider._SERVERLESS_LAYER, SamFunctionProvider._LAMBDA_LAYER): raise InvalidLayerReference() layer_properties = layer_resource.get("Properties", {}) resource_type = layer_resource.get("Type") codeuri = None if resource_type == SamFunctionProvider._LAMBDA_LAYER: codeuri = SamFunctionProvider._extract_lambda_function_code(layer_properties, "Content") if resource_type == SamFunctionProvider._SERVERLESS_LAYER: codeuri = SamFunctionProvider._extract_sam_function_codeuri(layer_logical_id, layer_properties, "ContentUri") layers.append(LayerVersion(layer_logical_id, codeuri)) return layers
[ "def", "_parse_layer_info", "(", "list_of_layers", ",", "resources", ")", ":", "layers", "=", "[", "]", "for", "layer", "in", "list_of_layers", ":", "# If the layer is a string, assume it is the arn", "if", "isinstance", "(", "layer", ",", "six", ".", "string_types", ")", ":", "layers", ".", "append", "(", "LayerVersion", "(", "layer", ",", "None", ")", ")", "continue", "# In the list of layers that is defined within a template, you can reference a LayerVersion resource.", "# When running locally, we need to follow that Ref so we can extract the local path to the layer code.", "if", "isinstance", "(", "layer", ",", "dict", ")", "and", "layer", ".", "get", "(", "\"Ref\"", ")", ":", "layer_logical_id", "=", "layer", ".", "get", "(", "\"Ref\"", ")", "layer_resource", "=", "resources", ".", "get", "(", "layer_logical_id", ")", "if", "not", "layer_resource", "or", "layer_resource", ".", "get", "(", "\"Type\"", ",", "\"\"", ")", "not", "in", "(", "SamFunctionProvider", ".", "_SERVERLESS_LAYER", ",", "SamFunctionProvider", ".", "_LAMBDA_LAYER", ")", ":", "raise", "InvalidLayerReference", "(", ")", "layer_properties", "=", "layer_resource", ".", "get", "(", "\"Properties\"", ",", "{", "}", ")", "resource_type", "=", "layer_resource", ".", "get", "(", "\"Type\"", ")", "codeuri", "=", "None", "if", "resource_type", "==", "SamFunctionProvider", ".", "_LAMBDA_LAYER", ":", "codeuri", "=", "SamFunctionProvider", ".", "_extract_lambda_function_code", "(", "layer_properties", ",", "\"Content\"", ")", "if", "resource_type", "==", "SamFunctionProvider", ".", "_SERVERLESS_LAYER", ":", "codeuri", "=", "SamFunctionProvider", ".", "_extract_sam_function_codeuri", "(", "layer_logical_id", ",", "layer_properties", ",", "\"ContentUri\"", ")", "layers", ".", "append", "(", "LayerVersion", "(", "layer_logical_id", ",", "codeuri", ")", ")", "return", "layers" ]
Creates a list of Layer objects that are represented by the resources and the list of layers Parameters ---------- list_of_layers List(str) List of layers that are defined within the Layers Property on a function resources dict The Resources dictionary defined in a template Returns ------- List(samcli.commands.local.lib.provider.Layer) List of the Layer objects created from the template and layer list defined on the function. The order of the layers does not change. I.E: list_of_layers = ["layer1", "layer2"] the return would be [Layer("layer1"), Layer("layer2")]
[ "Creates", "a", "list", "of", "Layer", "objects", "that", "are", "represented", "by", "the", "resources", "and", "the", "list", "of", "layers" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/sam_function_provider.py#L220-L270
train
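To make the two accepted layer shapes concrete, here is a small walkthrough of the branching above with hypothetical resource names; it only prints what the real code would turn into LayerVersion objects:

layers_property = [
    "arn:aws:lambda:us-east-1:123456789012:layer:common:1",  # plain ARN string
    {"Ref": "MyLayer"},                                      # reference into Resources
]
resources = {
    "MyLayer": {
        "Type": "AWS::Serverless::LayerVersion",
        "Properties": {"ContentUri": "./layer-src"},
    }
}
for layer in layers_property:
    if isinstance(layer, str):
        print("arn-only layer:", layer)          # no local code path available
    elif isinstance(layer, dict) and layer.get("Ref"):
        res = resources[layer["Ref"]]            # the real code raises InvalidLayerReference if missing
        print("local layer:", res["Properties"]["ContentUri"])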
awslabs/aws-sam-cli
samcli/local/lambdafn/env_vars.py
EnvironmentVariables.resolve
def resolve(self):
        """
        Resolves the values from different sources and returns a dict of environment variables to use when running
        the function locally.

        :return dict: Dict where key is the variable name and value is the value of the variable. Both keys and
            values are strings
        """

        # AWS_* variables must always be passed to the function, but user has the choice to override them
        result = self._get_aws_variables()

        # Default value for the variable gets lowest priority
        for name, value in self.variables.items():

            # Shell environment values, second priority
            if name in self.shell_env_values:
                value = self.shell_env_values[name]

            # Overridden values, highest priority
            if name in self.override_values:
                value = self.override_values[name]

            # Any value must be a string when passed to Lambda runtime.
            # Runtime expects a Map<String, String> for environment variables
            result[name] = self._stringify_value(value)

        return result
python
def resolve(self):
        """
        Resolves the values from different sources and returns a dict of environment variables to use when running
        the function locally.

        :return dict: Dict where key is the variable name and value is the value of the variable. Both keys and
            values are strings
        """

        # AWS_* variables must always be passed to the function, but user has the choice to override them
        result = self._get_aws_variables()

        # Default value for the variable gets lowest priority
        for name, value in self.variables.items():

            # Shell environment values, second priority
            if name in self.shell_env_values:
                value = self.shell_env_values[name]

            # Overridden values, highest priority
            if name in self.override_values:
                value = self.override_values[name]

            # Any value must be a string when passed to Lambda runtime.
            # Runtime expects a Map<String, String> for environment variables
            result[name] = self._stringify_value(value)

        return result
[ "def", "resolve", "(", "self", ")", ":", "# AWS_* variables must always be passed to the function, but user has the choice to override them", "result", "=", "self", ".", "_get_aws_variables", "(", ")", "# Default value for the variable gets lowest priority", "for", "name", ",", "value", "in", "self", ".", "variables", ".", "items", "(", ")", ":", "# Shell environment values, second priority", "if", "name", "in", "self", ".", "shell_env_values", ":", "value", "=", "self", ".", "shell_env_values", "[", "name", "]", "# Overridden values, highest priority", "if", "name", "in", "self", ".", "override_values", ":", "value", "=", "self", ".", "override_values", "[", "name", "]", "# Any value must be a string when passed to Lambda runtime.", "# Runtime expects a Map<String, String> for environment variables", "result", "[", "name", "]", "=", "self", ".", "_stringify_value", "(", "value", ")", "return", "result" ]
Resolves the values from different sources and returns a dict of environment variables to use
when running the function locally.

:return dict: Dict where key is the variable name and value is the value of the variable. Both keys and
    values are strings
[ "Resolves", "the", "values", "from", "different", "sources", "and", "returns", "a", "dict", "of", "environment", "variables", "to", "use", "when", "running", "the", "function", "locally", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/env_vars.py#L77-L104
train
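The three-level precedence is easiest to see with a toy example. This sketch reimplements just the merge order (defaults, then shell environment, then explicit overrides) with made-up variable names:

defaults  = {"TABLE_NAME": "prod-table", "STAGE": "prod", "DEBUG": "0"}
shell_env = {"STAGE": "dev"}                 # second priority
overrides = {"DEBUG": "1"}                   # highest priority

result = {}
for name, value in defaults.items():
    if name in shell_env:
        value = shell_env[name]
    if name in overrides:
        value = overrides[name]
    result[name] = str(value)                # runtime expects Map<String, String>

assert result == {"TABLE_NAME": "prod-table", "STAGE": "dev", "DEBUG": "1"}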
awslabs/aws-sam-cli
samcli/local/lambdafn/env_vars.py
EnvironmentVariables._get_aws_variables
def _get_aws_variables(self):
        """
        Returns the AWS specific environment variables that should be available in the Lambda runtime.
        They are prefixed with "AWS_*".

        :return dict: Name and value of AWS environment variable
        """

        result = {
            # Variable that says this function is running in Local Lambda
            "AWS_SAM_LOCAL": "true",

            # Function configuration
            "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": str(self.memory),
            "AWS_LAMBDA_FUNCTION_TIMEOUT": str(self.timeout),
            "AWS_LAMBDA_FUNCTION_HANDLER": str(self._function["handler"]),

            # AWS Credentials - Use the input credentials or use the defaults
            "AWS_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]),
            "AWS_DEFAULT_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]),
            "AWS_ACCESS_KEY_ID": self.aws_creds.get("key", self._DEFAULT_AWS_CREDS["key"]),
            "AWS_SECRET_ACCESS_KEY": self.aws_creds.get("secret", self._DEFAULT_AWS_CREDS["secret"])

            # Additional variables we don't fill in
            # "AWS_ACCOUNT_ID="
            # "AWS_LAMBDA_EVENT_BODY=",
            # "AWS_LAMBDA_FUNCTION_NAME=",
            # "AWS_LAMBDA_FUNCTION_VERSION=",
        }

        # Session Token should be added **only** if the input creds have a token and the value is not empty.
        if self.aws_creds.get("sessiontoken"):
            result["AWS_SESSION_TOKEN"] = self.aws_creds.get("sessiontoken")

        return result
python
def _get_aws_variables(self):
        """
        Returns the AWS specific environment variables that should be available in the Lambda runtime.
        They are prefixed with "AWS_*".

        :return dict: Name and value of AWS environment variable
        """

        result = {
            # Variable that says this function is running in Local Lambda
            "AWS_SAM_LOCAL": "true",

            # Function configuration
            "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": str(self.memory),
            "AWS_LAMBDA_FUNCTION_TIMEOUT": str(self.timeout),
            "AWS_LAMBDA_FUNCTION_HANDLER": str(self._function["handler"]),

            # AWS Credentials - Use the input credentials or use the defaults
            "AWS_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]),
            "AWS_DEFAULT_REGION": self.aws_creds.get("region", self._DEFAULT_AWS_CREDS["region"]),
            "AWS_ACCESS_KEY_ID": self.aws_creds.get("key", self._DEFAULT_AWS_CREDS["key"]),
            "AWS_SECRET_ACCESS_KEY": self.aws_creds.get("secret", self._DEFAULT_AWS_CREDS["secret"])

            # Additional variables we don't fill in
            # "AWS_ACCOUNT_ID="
            # "AWS_LAMBDA_EVENT_BODY=",
            # "AWS_LAMBDA_FUNCTION_NAME=",
            # "AWS_LAMBDA_FUNCTION_VERSION=",
        }

        # Session Token should be added **only** if the input creds have a token and the value is not empty.
        if self.aws_creds.get("sessiontoken"):
            result["AWS_SESSION_TOKEN"] = self.aws_creds.get("sessiontoken")

        return result
[ "def", "_get_aws_variables", "(", "self", ")", ":", "result", "=", "{", "# Variable that says this function is running in Local Lambda", "\"AWS_SAM_LOCAL\"", ":", "\"true\"", ",", "# Function configuration", "\"AWS_LAMBDA_FUNCTION_MEMORY_SIZE\"", ":", "str", "(", "self", ".", "memory", ")", ",", "\"AWS_LAMBDA_FUNCTION_TIMEOUT\"", ":", "str", "(", "self", ".", "timeout", ")", ",", "\"AWS_LAMBDA_FUNCTION_HANDLER\"", ":", "str", "(", "self", ".", "_function", "[", "\"handler\"", "]", ")", ",", "# AWS Credentials - Use the input credentials or use the defaults", "\"AWS_REGION\"", ":", "self", ".", "aws_creds", ".", "get", "(", "\"region\"", ",", "self", ".", "_DEFAULT_AWS_CREDS", "[", "\"region\"", "]", ")", ",", "\"AWS_DEFAULT_REGION\"", ":", "self", ".", "aws_creds", ".", "get", "(", "\"region\"", ",", "self", ".", "_DEFAULT_AWS_CREDS", "[", "\"region\"", "]", ")", ",", "\"AWS_ACCESS_KEY_ID\"", ":", "self", ".", "aws_creds", ".", "get", "(", "\"key\"", ",", "self", ".", "_DEFAULT_AWS_CREDS", "[", "\"key\"", "]", ")", ",", "\"AWS_SECRET_ACCESS_KEY\"", ":", "self", ".", "aws_creds", ".", "get", "(", "\"secret\"", ",", "self", ".", "_DEFAULT_AWS_CREDS", "[", "\"secret\"", "]", ")", "# Additional variables we don't fill in", "# \"AWS_ACCOUNT_ID=\"", "# \"AWS_LAMBDA_EVENT_BODY=\",", "# \"AWS_LAMBDA_FUNCTION_NAME=\",", "# \"AWS_LAMBDA_FUNCTION_VERSION=\",", "}", "# Session Token should be added **only** if the input creds have a token and the value is not empty.", "if", "self", ".", "aws_creds", ".", "get", "(", "\"sessiontoken\"", ")", ":", "result", "[", "\"AWS_SESSION_TOKEN\"", "]", "=", "self", ".", "aws_creds", ".", "get", "(", "\"sessiontoken\"", ")", "return", "result" ]
Returns the AWS specific environment variables that should be available in the Lambda runtime.
They are prefixed with "AWS_*".

:return dict: Name and value of AWS environment variable
[ "Returns", "the", "AWS", "specific", "environment", "variables", "that", "should", "be", "available", "in", "the", "Lambda", "runtime", ".", "They", "are", "prefixed", "it", "AWS_", "*", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/env_vars.py#L136-L173
train
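A small sketch of the credential defaulting, using placeholder values where the real code reads self._DEFAULT_AWS_CREDS (the actual defaults are not shown in this record, so these values are assumptions):

DEFAULT_CREDS = {"region": "us-east-1", "key": "defaultkey", "secret": "defaultsecret"}  # placeholders
aws_creds = {"region": "eu-west-1", "sessiontoken": ""}  # only region supplied; empty token

env = {
    "AWS_SAM_LOCAL": "true",
    "AWS_REGION": aws_creds.get("region", DEFAULT_CREDS["region"]),
    "AWS_ACCESS_KEY_ID": aws_creds.get("key", DEFAULT_CREDS["key"]),
    "AWS_SECRET_ACCESS_KEY": aws_creds.get("secret", DEFAULT_CREDS["secret"]),
}
if aws_creds.get("sessiontoken"):  # empty string is falsy, so it is skipped, as in the record above
    env["AWS_SESSION_TOKEN"] = aws_creds["sessiontoken"]

assert env["AWS_REGION"] == "eu-west-1"
assert "AWS_SESSION_TOKEN" not in env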
awslabs/aws-sam-cli
samcli/local/lambdafn/env_vars.py
EnvironmentVariables._stringify_value
def _stringify_value(self, value):
        """
        This method stringifies values of environment variables. If the value of the variable is a list or dictionary,
        then this method will replace it with an empty string.

        Values of environment variables in Lambda must be a string. List or dictionary usually means they are
        intrinsic functions which have not been resolved.

        :param value: Value to stringify
        :return string: Stringified value
        """

        # List/dict/None values are replaced with a blank
        if isinstance(value, (dict, list, tuple)) or value is None:
            result = self._BLANK_VALUE

        # str(True) will output "True". To maintain backwards compatibility we need to output "true" or "false"
        elif value is True:
            result = "true"

        elif value is False:
            result = "false"

        # value is a scalar type like int, str which can be stringified
        # do not stringify unicode in Py2, Py3 str supports unicode
        elif sys.version_info.major > 2:
            result = str(value)
        elif not isinstance(value, unicode):  # noqa: F821 pylint: disable=undefined-variable
            result = str(value)
        else:
            result = value

        return result
python
def _stringify_value(self, value):
        """
        This method stringifies values of environment variables. If the value of the variable is a list or dictionary,
        then this method will replace it with an empty string.

        Values of environment variables in Lambda must be a string. List or dictionary usually means they are
        intrinsic functions which have not been resolved.

        :param value: Value to stringify
        :return string: Stringified value
        """

        # List/dict/None values are replaced with a blank
        if isinstance(value, (dict, list, tuple)) or value is None:
            result = self._BLANK_VALUE

        # str(True) will output "True". To maintain backwards compatibility we need to output "true" or "false"
        elif value is True:
            result = "true"

        elif value is False:
            result = "false"

        # value is a scalar type like int, str which can be stringified
        # do not stringify unicode in Py2, Py3 str supports unicode
        elif sys.version_info.major > 2:
            result = str(value)
        elif not isinstance(value, unicode):  # noqa: F821 pylint: disable=undefined-variable
            result = str(value)
        else:
            result = value

        return result
[ "def", "_stringify_value", "(", "self", ",", "value", ")", ":", "# List/dict/None values are replaced with a blank", "if", "isinstance", "(", "value", ",", "(", "dict", ",", "list", ",", "tuple", ")", ")", "or", "value", "is", "None", ":", "result", "=", "self", ".", "_BLANK_VALUE", "# str(True) will output \"True\". To maintain backwards compatibility we need to output \"true\" or \"false\"", "elif", "value", "is", "True", ":", "result", "=", "\"true\"", "elif", "value", "is", "False", ":", "result", "=", "\"false\"", "# value is a scalar type like int, str which can be stringified", "# do not stringify unicode in Py2, Py3 str supports unicode", "elif", "sys", ".", "version_info", ".", "major", ">", "2", ":", "result", "=", "str", "(", "value", ")", "elif", "not", "isinstance", "(", "value", ",", "unicode", ")", ":", "# noqa: F821 pylint: disable=undefined-variable", "result", "=", "str", "(", "value", ")", "else", ":", "result", "=", "value", "return", "result" ]
This method stringifies values of environment variables. If the value of the variable is a list or dictionary,
then this method will replace it with an empty string.

Values of environment variables in Lambda must be a string. List or dictionary usually means they are
intrinsic functions which have not been resolved.

:param value: Value to stringify
:return string: Stringified value
[ "This", "method", "stringifies", "values", "of", "environment", "variables", ".", "If", "the", "value", "of", "the", "method", "is", "a", "list", "or", "dictionary", "then", "this", "method", "will", "replace", "it", "with", "empty", "string", ".", "Values", "of", "environment", "variables", "in", "Lambda", "must", "be", "a", "string", ".", "List", "or", "dictionary", "usually", "means", "they", "are", "intrinsic", "functions", "which", "have", "not", "been", "resolved", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambdafn/env_vars.py#L175-L204
train
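The conversion rules are compact enough to restate as a Python-3-only sketch (the Py2 unicode branch is dropped, and _BLANK_VALUE is assumed to be the empty string):

def stringify(value, blank=""):
    # Unresolved intrinsics (dict/list/tuple) and None become a blank value
    if isinstance(value, (dict, list, tuple)) or value is None:
        return blank
    # Lambda expects lowercase "true"/"false", not Python's str(True) == "True"
    if value is True:
        return "true"
    if value is False:
        return "false"
    return str(value)  # scalars such as int and str

assert stringify({"Ref": "MyParam"}) == ""
assert stringify(True) == "true"
assert stringify(128) == "128"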
awslabs/aws-sam-cli
samcli/local/docker/container.py
Container.create
def create(self):
        """
        Calls Docker API to create the Docker container instance. Creating the container does *not* run the container.
        Use the ``start`` method to run the container.

        :return string: ID of the created container
        :raise RuntimeError: If this method is called after a container already has been created
        """

        if self.is_created():
            raise RuntimeError("This container already exists. Cannot create again.")

        LOG.info("Mounting %s as %s:ro,delegated inside runtime container", self._host_dir, self._working_dir)

        kwargs = {
            "command": self._cmd,
            "working_dir": self._working_dir,
            "volumes": {
                self._host_dir: {
                    # Mount the host directory as "read only" directory inside container at working_dir
                    # https://docs.docker.com/storage/bind-mounts
                    # Mount the host directory as "read only" inside container
                    "bind": self._working_dir,
                    "mode": "ro,delegated"
                }
            },
            # We are not running an interactive shell here.
            "tty": False
        }

        if self._container_opts:
            kwargs.update(self._container_opts)

        if self._additional_volumes:
            kwargs["volumes"].update(self._additional_volumes)

        # Make sure all mounts are of posix path style.
        kwargs["volumes"] = {to_posix_path(host_dir): mount for host_dir, mount in kwargs["volumes"].items()}

        if self._env_vars:
            kwargs["environment"] = self._env_vars

        if self._exposed_ports:
            kwargs["ports"] = self._exposed_ports

        if self._entrypoint:
            kwargs["entrypoint"] = self._entrypoint

        if self._memory_limit_mb:
            # Ex: 128m => 128MB
            kwargs["mem_limit"] = "{}m".format(self._memory_limit_mb)

        if self.network_id == 'host':
            kwargs["network_mode"] = self.network_id

        real_container = self.docker_client.containers.create(self._image, **kwargs)
        self.id = real_container.id

        if self.network_id and self.network_id != 'host':
            network = self.docker_client.networks.get(self.network_id)
            network.connect(self.id)

        return self.id
python
def create(self):
        """
        Calls Docker API to create the Docker container instance. Creating the container does *not* run the container.
        Use the ``start`` method to run the container.

        :return string: ID of the created container
        :raise RuntimeError: If this method is called after a container already has been created
        """

        if self.is_created():
            raise RuntimeError("This container already exists. Cannot create again.")

        LOG.info("Mounting %s as %s:ro,delegated inside runtime container", self._host_dir, self._working_dir)

        kwargs = {
            "command": self._cmd,
            "working_dir": self._working_dir,
            "volumes": {
                self._host_dir: {
                    # Mount the host directory as "read only" directory inside container at working_dir
                    # https://docs.docker.com/storage/bind-mounts
                    # Mount the host directory as "read only" inside container
                    "bind": self._working_dir,
                    "mode": "ro,delegated"
                }
            },
            # We are not running an interactive shell here.
            "tty": False
        }

        if self._container_opts:
            kwargs.update(self._container_opts)

        if self._additional_volumes:
            kwargs["volumes"].update(self._additional_volumes)

        # Make sure all mounts are of posix path style.
        kwargs["volumes"] = {to_posix_path(host_dir): mount for host_dir, mount in kwargs["volumes"].items()}

        if self._env_vars:
            kwargs["environment"] = self._env_vars

        if self._exposed_ports:
            kwargs["ports"] = self._exposed_ports

        if self._entrypoint:
            kwargs["entrypoint"] = self._entrypoint

        if self._memory_limit_mb:
            # Ex: 128m => 128MB
            kwargs["mem_limit"] = "{}m".format(self._memory_limit_mb)

        if self.network_id == 'host':
            kwargs["network_mode"] = self.network_id

        real_container = self.docker_client.containers.create(self._image, **kwargs)
        self.id = real_container.id

        if self.network_id and self.network_id != 'host':
            network = self.docker_client.networks.get(self.network_id)
            network.connect(self.id)

        return self.id
[ "def", "create", "(", "self", ")", ":", "if", "self", ".", "is_created", "(", ")", ":", "raise", "RuntimeError", "(", "\"This container already exists. Cannot create again.\"", ")", "LOG", ".", "info", "(", "\"Mounting %s as %s:ro,delegated inside runtime container\"", ",", "self", ".", "_host_dir", ",", "self", ".", "_working_dir", ")", "kwargs", "=", "{", "\"command\"", ":", "self", ".", "_cmd", ",", "\"working_dir\"", ":", "self", ".", "_working_dir", ",", "\"volumes\"", ":", "{", "self", ".", "_host_dir", ":", "{", "# Mount the host directory as \"read only\" directory inside container at working_dir", "# https://docs.docker.com/storage/bind-mounts", "# Mount the host directory as \"read only\" inside container", "\"bind\"", ":", "self", ".", "_working_dir", ",", "\"mode\"", ":", "\"ro,delegated\"", "}", "}", ",", "# We are not running an interactive shell here.", "\"tty\"", ":", "False", "}", "if", "self", ".", "_container_opts", ":", "kwargs", ".", "update", "(", "self", ".", "_container_opts", ")", "if", "self", ".", "_additional_volumes", ":", "kwargs", "[", "\"volumes\"", "]", ".", "update", "(", "self", ".", "_additional_volumes", ")", "# Make sure all mounts are of posix path style.", "kwargs", "[", "\"volumes\"", "]", "=", "{", "to_posix_path", "(", "host_dir", ")", ":", "mount", "for", "host_dir", ",", "mount", "in", "kwargs", "[", "\"volumes\"", "]", ".", "items", "(", ")", "}", "if", "self", ".", "_env_vars", ":", "kwargs", "[", "\"environment\"", "]", "=", "self", ".", "_env_vars", "if", "self", ".", "_exposed_ports", ":", "kwargs", "[", "\"ports\"", "]", "=", "self", ".", "_exposed_ports", "if", "self", ".", "_entrypoint", ":", "kwargs", "[", "\"entrypoint\"", "]", "=", "self", ".", "_entrypoint", "if", "self", ".", "_memory_limit_mb", ":", "# Ex: 128m => 128MB", "kwargs", "[", "\"mem_limit\"", "]", "=", "\"{}m\"", ".", "format", "(", "self", ".", "_memory_limit_mb", ")", "if", "self", ".", "network_id", "==", "'host'", ":", "kwargs", "[", "\"network_mode\"", "]", "=", "self", ".", "network_id", "real_container", "=", "self", ".", "docker_client", ".", "containers", ".", "create", "(", "self", ".", "_image", ",", "*", "*", "kwargs", ")", "self", ".", "id", "=", "real_container", ".", "id", "if", "self", ".", "network_id", "and", "self", ".", "network_id", "!=", "'host'", ":", "network", "=", "self", ".", "docker_client", ".", "networks", ".", "get", "(", "self", ".", "network_id", ")", "network", ".", "connect", "(", "self", ".", "id", ")", "return", "self", ".", "id" ]
Calls Docker API to create the Docker container instance. Creating the container does *not* run the container.
Use the ``start`` method to run the container.

:return string: ID of the created container
:raise RuntimeError: If this method is called after a container already has been created
[ "Calls", "Docker", "API", "to", "creates", "the", "Docker", "container", "instance", ".", "Creating", "the", "container", "does", "*", "not", "*", "run", "the", "container", ".", "Use", "start", "method", "to", "run", "the", "container" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/container.py#L75-L137
train
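For readers unfamiliar with docker-py, a stripped-down version of the same create call looks like this. It is a sketch only: it assumes a running Docker daemon and a locally available alpine image, and hard-codes values the real code derives from the Lambda function:

import docker  # pip install docker

client = docker.from_env()  # requires a running Docker daemon
container = client.containers.create(
    "alpine",                          # stand-in for the Lambda runtime image
    command=["echo", "hello"],
    working_dir="/var/task",
    volumes={"/tmp/app": {"bind": "/var/task", "mode": "ro"}},
    tty=False,
    mem_limit="128m",                  # "<n>m" string form, as in the record above
)
print(container.id)
container.remove(force=True)           # clean up the sketch's container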
awslabs/aws-sam-cli
samcli/local/docker/container.py
Container.delete
def delete(self): """ Removes a container that was created earlier. """ if not self.is_created(): LOG.debug("Container was not created. Skipping deletion") return try: self.docker_client.containers\ .get(self.id)\ .remove(force=True) # Remove a container, even if it is running except docker.errors.NotFound: # Container is already not there LOG.debug("Container with ID %s does not exist. Skipping deletion", self.id) except docker.errors.APIError as ex: msg = str(ex) removal_in_progress = ("removal of container" in msg) and ("is already in progress" in msg) # When removal is already started, Docker API will throw an exception # Skip such exceptions. if not removal_in_progress: raise ex self.id = None
python
def delete(self): """ Removes a container that was created earlier. """ if not self.is_created(): LOG.debug("Container was not created. Skipping deletion") return try: self.docker_client.containers\ .get(self.id)\ .remove(force=True) # Remove a container, even if it is running except docker.errors.NotFound: # Container is already not there LOG.debug("Container with ID %s does not exist. Skipping deletion", self.id) except docker.errors.APIError as ex: msg = str(ex) removal_in_progress = ("removal of container" in msg) and ("is already in progress" in msg) # When removal is already started, Docker API will throw an exception # Skip such exceptions. if not removal_in_progress: raise ex self.id = None
[ "def", "delete", "(", "self", ")", ":", "if", "not", "self", ".", "is_created", "(", ")", ":", "LOG", ".", "debug", "(", "\"Container was not created. Skipping deletion\"", ")", "return", "try", ":", "self", ".", "docker_client", ".", "containers", ".", "get", "(", "self", ".", "id", ")", ".", "remove", "(", "force", "=", "True", ")", "# Remove a container, even if it is running", "except", "docker", ".", "errors", ".", "NotFound", ":", "# Container is already not there", "LOG", ".", "debug", "(", "\"Container with ID %s does not exist. Skipping deletion\"", ",", "self", ".", "id", ")", "except", "docker", ".", "errors", ".", "APIError", "as", "ex", ":", "msg", "=", "str", "(", "ex", ")", "removal_in_progress", "=", "(", "\"removal of container\"", "in", "msg", ")", "and", "(", "\"is already in progress\"", "in", "msg", ")", "# When removal is already started, Docker API will throw an exception", "# Skip such exceptions.", "if", "not", "removal_in_progress", ":", "raise", "ex", "self", ".", "id", "=", "None" ]
Removes a container that was created earlier.
[ "Removes", "a", "container", "that", "was", "created", "earlier", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/container.py#L139-L163
train
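The error handling translates directly to docker-py. A hedged sketch with a made-up container name; both exception types are real docker-py classes:

import docker

client = docker.from_env()
try:
    client.containers.get("no-such-container").remove(force=True)
except docker.errors.NotFound:
    print("container already gone, skipping deletion")  # mirrors the debug-and-return path
except docker.errors.APIError as ex:
    # Only the "removal already in progress" case is swallowed, as in the record above
    if "removal of container" not in str(ex) or "is already in progress" not in str(ex):
        raise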
awslabs/aws-sam-cli
samcli/local/docker/container.py
Container.start
def start(self, input_data=None):
        """
        Calls Docker API to start the container. The container must be created first in order to run.
        This method only starts the container. It does not wait for the container to finish or fetch its logs.

        Parameters
        ----------
        input_data
            Optional. Input data sent to the container through container's stdin.
        """

        if input_data:
            raise ValueError("Passing input through container's stdin is not supported")

        if not self.is_created():
            raise RuntimeError("Container does not exist. Cannot start this container")

        # Get the underlying container instance from Docker API
        real_container = self.docker_client.containers.get(self.id)

        # Start the container
        real_container.start()
python
def start(self, input_data=None):
        """
        Calls Docker API to start the container. The container must be created first in order to run.
        This method only starts the container. It does not wait for the container to finish or fetch its logs.

        Parameters
        ----------
        input_data
            Optional. Input data sent to the container through container's stdin.
        """

        if input_data:
            raise ValueError("Passing input through container's stdin is not supported")

        if not self.is_created():
            raise RuntimeError("Container does not exist. Cannot start this container")

        # Get the underlying container instance from Docker API
        real_container = self.docker_client.containers.get(self.id)

        # Start the container
        real_container.start()
[ "def", "start", "(", "self", ",", "input_data", "=", "None", ")", ":", "if", "input_data", ":", "raise", "ValueError", "(", "\"Passing input through container's stdin is not supported\"", ")", "if", "not", "self", ".", "is_created", "(", ")", ":", "raise", "RuntimeError", "(", "\"Container does not exist. Cannot start this container\"", ")", "# Get the underlying container instance from Docker API", "real_container", "=", "self", ".", "docker_client", ".", "containers", ".", "get", "(", "self", ".", "id", ")", "# Start the container", "real_container", ".", "start", "(", ")" ]
Calls Docker API to start the container. The container must be created first in order to run.
This method only starts the container. It does not wait for the container to finish or fetch its logs.

Parameters
----------
input_data
    Optional. Input data sent to the container through container's stdin.
[ "Calls", "Docker", "API", "to", "start", "the", "container", ".", "The", "container", "must", "be", "created", "at", "the", "first", "place", "to", "run", ".", "It", "waits", "for", "the", "container", "to", "complete", "fetches", "both", "stdout", "and", "stderr", "logs", "and", "returns", "through", "the", "given", "streams", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/container.py#L165-L187
train
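Continuing the docker-py sketch from the create record above, the create-then-start contract looks like this (again assuming a daemon and a local alpine image):

import docker

client = docker.from_env()
container = client.containers.create("alpine", command=["true"])
try:
    container.start()  # start() only launches the container; output is collected elsewhere
finally:
    container.remove(force=True)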
awslabs/aws-sam-cli
samcli/local/docker/container.py
Container._write_container_output
def _write_container_output(output_itr, stdout=None, stderr=None): """ Based on the data returned from the Container output, via the iterator, write it to the appropriate streams Parameters ---------- output_itr: Iterator Iterator returned by the Docker Attach command stdout: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stdout data from Container into stderr: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stderr data from the Container into """ # Iterator returns a tuple of (frame_type, data) where the frame type determines which stream we write output # to for frame_type, data in output_itr: if frame_type == Container._STDOUT_FRAME_TYPE and stdout: # Frame type 1 is stdout data. stdout.write(data) elif frame_type == Container._STDERR_FRAME_TYPE and stderr: # Frame type 2 is stderr data. stderr.write(data) else: # Either an unsupported frame type or stream for this frame type is not configured LOG.debug("Dropping Docker container output because of unconfigured frame type. " "Frame Type: %s. Data: %s", frame_type, data)
python
def _write_container_output(output_itr, stdout=None, stderr=None): """ Based on the data returned from the Container output, via the iterator, write it to the appropriate streams Parameters ---------- output_itr: Iterator Iterator returned by the Docker Attach command stdout: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stdout data from Container into stderr: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stderr data from the Container into """ # Iterator returns a tuple of (frame_type, data) where the frame type determines which stream we write output # to for frame_type, data in output_itr: if frame_type == Container._STDOUT_FRAME_TYPE and stdout: # Frame type 1 is stdout data. stdout.write(data) elif frame_type == Container._STDERR_FRAME_TYPE and stderr: # Frame type 2 is stderr data. stderr.write(data) else: # Either an unsupported frame type or stream for this frame type is not configured LOG.debug("Dropping Docker container output because of unconfigured frame type. " "Frame Type: %s. Data: %s", frame_type, data)
[ "def", "_write_container_output", "(", "output_itr", ",", "stdout", "=", "None", ",", "stderr", "=", "None", ")", ":", "# Iterator returns a tuple of (frame_type, data) where the frame type determines which stream we write output", "# to", "for", "frame_type", ",", "data", "in", "output_itr", ":", "if", "frame_type", "==", "Container", ".", "_STDOUT_FRAME_TYPE", "and", "stdout", ":", "# Frame type 1 is stdout data.", "stdout", ".", "write", "(", "data", ")", "elif", "frame_type", "==", "Container", ".", "_STDERR_FRAME_TYPE", "and", "stderr", ":", "# Frame type 2 is stderr data.", "stderr", ".", "write", "(", "data", ")", "else", ":", "# Either an unsupported frame type or stream for this frame type is not configured", "LOG", ".", "debug", "(", "\"Dropping Docker container output because of unconfigured frame type. \"", "\"Frame Type: %s. Data: %s\"", ",", "frame_type", ",", "data", ")" ]
Based on the data returned from the Container output, via the iterator, write it to the appropriate streams Parameters ---------- output_itr: Iterator Iterator returned by the Docker Attach command stdout: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stdout data from Container into stderr: samcli.lib.utils.stream_writer.StreamWriter, optional Stream writer to write stderr data from the Container into
[ "Based", "on", "the", "data", "returned", "from", "the", "Container", "output", "via", "the", "iterator", "write", "it", "to", "the", "appropriate", "streams" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/container.py#L229-L258
train
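The frame demultiplexing can be exercised without Docker at all, since the method only consumes (frame_type, data) tuples. A self-contained sketch, with the frame-type constants assumed to be 1 and 2:

import sys

STDOUT_FRAME, STDERR_FRAME = 1, 2  # assumed values of the _STDOUT/_STDERR_FRAME_TYPE constants

def demux(output_itr, stdout=None, stderr=None):
    for frame_type, data in output_itr:
        if frame_type == STDOUT_FRAME and stdout:
            stdout.write(data)
        elif frame_type == STDERR_FRAME and stderr:
            stderr.write(data)
        # anything else is dropped, as in the record above

demux([(1, "to stdout\n"), (2, "to stderr\n"), (3, "dropped\n")],
      stdout=sys.stdout, stderr=sys.stderr)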
awslabs/aws-sam-cli
samcli/commands/init/__init__.py
cli
def cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): """ \b Initialize a serverless application with a SAM template, folder structure for your Lambda functions, connected to an event source such as APIs, S3 Buckets or DynamoDB Tables. This application includes everything you need to get started with serverless and eventually grow into a production scale application. \b This command can initialize a boilerplate serverless app. If you want to create your own template as well as use a custom location please take a look at our official documentation. \b Common usage: \b Initializes a new SAM project using Python 3.6 default template runtime \b $ sam init --runtime python3.6 \b Initializes a new SAM project using Java 8 and Gradle dependency manager \b $ sam init --runtime java8 --dependency-manager gradle \b Initializes a new SAM project using custom template in a Git/Mercurial repository \b # gh being expanded to github url $ sam init --location gh:aws-samples/cookiecutter-aws-sam-python \b $ sam init --location git+ssh://git@github.com/aws-samples/cookiecutter-aws-sam-python.git \b $ sam init --location hg+ssh://hg@bitbucket.org/repo/template-name \b Initializes a new SAM project using custom template in a Zipfile \b $ sam init --location /path/to/template.zip \b $ sam init --location https://example.com/path/to/template.zip \b Initializes a new SAM project using custom template in a local path \b $ sam init --location /path/to/template/folder """ # All logic must be implemented in the `do_cli` method. This helps ease unit tests do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input)
python
def cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): """ \b Initialize a serverless application with a SAM template, folder structure for your Lambda functions, connected to an event source such as APIs, S3 Buckets or DynamoDB Tables. This application includes everything you need to get started with serverless and eventually grow into a production scale application. \b This command can initialize a boilerplate serverless app. If you want to create your own template as well as use a custom location please take a look at our official documentation. \b Common usage: \b Initializes a new SAM project using Python 3.6 default template runtime \b $ sam init --runtime python3.6 \b Initializes a new SAM project using Java 8 and Gradle dependency manager \b $ sam init --runtime java8 --dependency-manager gradle \b Initializes a new SAM project using custom template in a Git/Mercurial repository \b # gh being expanded to github url $ sam init --location gh:aws-samples/cookiecutter-aws-sam-python \b $ sam init --location git+ssh://git@github.com/aws-samples/cookiecutter-aws-sam-python.git \b $ sam init --location hg+ssh://hg@bitbucket.org/repo/template-name \b Initializes a new SAM project using custom template in a Zipfile \b $ sam init --location /path/to/template.zip \b $ sam init --location https://example.com/path/to/template.zip \b Initializes a new SAM project using custom template in a local path \b $ sam init --location /path/to/template/folder """ # All logic must be implemented in the `do_cli` method. This helps ease unit tests do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input)
[ "def", "cli", "(", "ctx", ",", "location", ",", "runtime", ",", "dependency_manager", ",", "output_dir", ",", "name", ",", "no_input", ")", ":", "# All logic must be implemented in the `do_cli` method. This helps ease unit tests", "do_cli", "(", "ctx", ",", "location", ",", "runtime", ",", "dependency_manager", ",", "output_dir", ",", "name", ",", "no_input", ")" ]
\b Initialize a serverless application with a SAM template, folder structure for your Lambda functions, connected to an event source such as APIs, S3 Buckets or DynamoDB Tables. This application includes everything you need to get started with serverless and eventually grow into a production scale application. \b This command can initialize a boilerplate serverless app. If you want to create your own template as well as use a custom location please take a look at our official documentation. \b Common usage: \b Initializes a new SAM project using Python 3.6 default template runtime \b $ sam init --runtime python3.6 \b Initializes a new SAM project using Java 8 and Gradle dependency manager \b $ sam init --runtime java8 --dependency-manager gradle \b Initializes a new SAM project using custom template in a Git/Mercurial repository \b # gh being expanded to github url $ sam init --location gh:aws-samples/cookiecutter-aws-sam-python \b $ sam init --location git+ssh://git@github.com/aws-samples/cookiecutter-aws-sam-python.git \b $ sam init --location hg+ssh://hg@bitbucket.org/repo/template-name \b Initializes a new SAM project using custom template in a Zipfile \b $ sam init --location /path/to/template.zip \b $ sam init --location https://example.com/path/to/template.zip \b Initializes a new SAM project using custom template in a local path \b $ sam init --location /path/to/template/folder
[ "\\", "b", "Initialize", "a", "serverless", "application", "with", "a", "SAM", "template", "folder", "structure", "for", "your", "Lambda", "functions", "connected", "to", "an", "event", "source", "such", "as", "APIs", "S3", "Buckets", "or", "DynamoDB", "Tables", ".", "This", "application", "includes", "everything", "you", "need", "to", "get", "started", "with", "serverless", "and", "eventually", "grow", "into", "a", "production", "scale", "application", ".", "\\", "b", "This", "command", "can", "initialize", "a", "boilerplate", "serverless", "app", ".", "If", "you", "want", "to", "create", "your", "own", "template", "as", "well", "as", "use", "a", "custom", "location", "please", "take", "a", "look", "at", "our", "official", "documentation", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/init/__init__.py#L30-L76
train
awslabs/aws-sam-cli
samcli/commands/init/__init__.py
do_cli
def do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ LOG.debug("Init command") click.secho("[+] Initializing project structure...", fg="green") no_build_msg = """ Project generated: {output_dir}/{name} Steps you can take next within the project folder =================================================== [*] Invoke Function: sam local invoke HelloWorldFunction --event event.json [*] Start API Gateway locally: sam local start-api """.format(output_dir=output_dir, name=name) build_msg = """ Project generated: {output_dir}/{name} Steps you can take next within the project folder =================================================== [*] Install dependencies [*] Invoke Function: sam local invoke HelloWorldFunction --event event.json [*] Start API Gateway locally: sam local start-api """.format(output_dir=output_dir, name=name) no_build_step_required = ( "python", "python3.7", "python3.6", "python2.7", "nodejs", "nodejs4.3", "nodejs6.10", "nodejs8.10", "ruby2.5") next_step_msg = no_build_msg if runtime in no_build_step_required else build_msg try: generate_project(location, runtime, dependency_manager, output_dir, name, no_input) if not location: click.secho(next_step_msg, bold=True) click.secho("Read {name}/README.md for further instructions\n".format(name=name), bold=True) click.secho("[*] Project initialization is now complete", fg="green") except GenerateProjectFailedError as e: raise UserException(str(e))
python
def do_cli(ctx, location, runtime, dependency_manager, output_dir, name, no_input): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ LOG.debug("Init command") click.secho("[+] Initializing project structure...", fg="green") no_build_msg = """ Project generated: {output_dir}/{name} Steps you can take next within the project folder =================================================== [*] Invoke Function: sam local invoke HelloWorldFunction --event event.json [*] Start API Gateway locally: sam local start-api """.format(output_dir=output_dir, name=name) build_msg = """ Project generated: {output_dir}/{name} Steps you can take next within the project folder =================================================== [*] Install dependencies [*] Invoke Function: sam local invoke HelloWorldFunction --event event.json [*] Start API Gateway locally: sam local start-api """.format(output_dir=output_dir, name=name) no_build_step_required = ( "python", "python3.7", "python3.6", "python2.7", "nodejs", "nodejs4.3", "nodejs6.10", "nodejs8.10", "ruby2.5") next_step_msg = no_build_msg if runtime in no_build_step_required else build_msg try: generate_project(location, runtime, dependency_manager, output_dir, name, no_input) if not location: click.secho(next_step_msg, bold=True) click.secho("Read {name}/README.md for further instructions\n".format(name=name), bold=True) click.secho("[*] Project initialization is now complete", fg="green") except GenerateProjectFailedError as e: raise UserException(str(e))
[ "def", "do_cli", "(", "ctx", ",", "location", ",", "runtime", ",", "dependency_manager", ",", "output_dir", ",", "name", ",", "no_input", ")", ":", "LOG", ".", "debug", "(", "\"Init command\"", ")", "click", ".", "secho", "(", "\"[+] Initializing project structure...\"", ",", "fg", "=", "\"green\"", ")", "no_build_msg", "=", "\"\"\"\nProject generated: {output_dir}/{name}\n\nSteps you can take next within the project folder\n===================================================\n[*] Invoke Function: sam local invoke HelloWorldFunction --event event.json\n[*] Start API Gateway locally: sam local start-api\n\"\"\"", ".", "format", "(", "output_dir", "=", "output_dir", ",", "name", "=", "name", ")", "build_msg", "=", "\"\"\"\nProject generated: {output_dir}/{name}\n\nSteps you can take next within the project folder\n===================================================\n[*] Install dependencies\n[*] Invoke Function: sam local invoke HelloWorldFunction --event event.json\n[*] Start API Gateway locally: sam local start-api\n\"\"\"", ".", "format", "(", "output_dir", "=", "output_dir", ",", "name", "=", "name", ")", "no_build_step_required", "=", "(", "\"python\"", ",", "\"python3.7\"", ",", "\"python3.6\"", ",", "\"python2.7\"", ",", "\"nodejs\"", ",", "\"nodejs4.3\"", ",", "\"nodejs6.10\"", ",", "\"nodejs8.10\"", ",", "\"ruby2.5\"", ")", "next_step_msg", "=", "no_build_msg", "if", "runtime", "in", "no_build_step_required", "else", "build_msg", "try", ":", "generate_project", "(", "location", ",", "runtime", ",", "dependency_manager", ",", "output_dir", ",", "name", ",", "no_input", ")", "if", "not", "location", ":", "click", ".", "secho", "(", "next_step_msg", ",", "bold", "=", "True", ")", "click", ".", "secho", "(", "\"Read {name}/README.md for further instructions\\n\"", ".", "format", "(", "name", "=", "name", ")", ",", "bold", "=", "True", ")", "click", ".", "secho", "(", "\"[*] Project initialization is now complete\"", ",", "fg", "=", "\"green\"", ")", "except", "GenerateProjectFailedError", "as", "e", ":", "raise", "UserException", "(", "str", "(", "e", ")", ")" ]
Implementation of the ``cli`` method, just separated out for unit testing purposes
[ "Implementation", "of", "the", "cli", "method", "just", "separated", "out", "for", "unit", "testing", "purposes" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/init/__init__.py#L79-L116
train
awslabs/aws-sam-cli
samcli/commands/build/command.py
do_cli
def do_cli(function_identifier, # pylint: disable=too-many-locals template, base_dir, build_dir, clean, use_container, manifest_path, docker_network, skip_pull_image, parameter_overrides, mode): """ Implementation of the ``cli`` method """ LOG.debug("'build' command is called") if use_container: LOG.info("Starting Build inside a container") with BuildContext(function_identifier, template, base_dir, build_dir, clean=clean, manifest_path=manifest_path, use_container=use_container, parameter_overrides=parameter_overrides, docker_network=docker_network, skip_pull_image=skip_pull_image, mode=mode) as ctx: try: builder = ApplicationBuilder(ctx.functions_to_build, ctx.build_dir, ctx.base_dir, manifest_path_override=ctx.manifest_path_override, container_manager=ctx.container_manager, mode=ctx.mode) except FunctionNotFound as ex: raise UserException(str(ex)) try: artifacts = builder.build() modified_template = builder.update_template(ctx.template_dict, ctx.original_template_path, artifacts) move_template(ctx.original_template_path, ctx.output_template_path, modified_template) click.secho("\nBuild Succeeded", fg="green") msg = gen_success_msg(os.path.relpath(ctx.build_dir), os.path.relpath(ctx.output_template_path), os.path.abspath(ctx.build_dir) == os.path.abspath(DEFAULT_BUILD_DIR)) click.secho(msg, fg="yellow") except (UnsupportedRuntimeException, BuildError, UnsupportedBuilderLibraryVersionError, ContainerBuildNotSupported) as ex: click.secho("\nBuild Failed", fg="red") raise UserException(str(ex))
python
def do_cli(function_identifier, # pylint: disable=too-many-locals template, base_dir, build_dir, clean, use_container, manifest_path, docker_network, skip_pull_image, parameter_overrides, mode): """ Implementation of the ``cli`` method """ LOG.debug("'build' command is called") if use_container: LOG.info("Starting Build inside a container") with BuildContext(function_identifier, template, base_dir, build_dir, clean=clean, manifest_path=manifest_path, use_container=use_container, parameter_overrides=parameter_overrides, docker_network=docker_network, skip_pull_image=skip_pull_image, mode=mode) as ctx: try: builder = ApplicationBuilder(ctx.functions_to_build, ctx.build_dir, ctx.base_dir, manifest_path_override=ctx.manifest_path_override, container_manager=ctx.container_manager, mode=ctx.mode) except FunctionNotFound as ex: raise UserException(str(ex)) try: artifacts = builder.build() modified_template = builder.update_template(ctx.template_dict, ctx.original_template_path, artifacts) move_template(ctx.original_template_path, ctx.output_template_path, modified_template) click.secho("\nBuild Succeeded", fg="green") msg = gen_success_msg(os.path.relpath(ctx.build_dir), os.path.relpath(ctx.output_template_path), os.path.abspath(ctx.build_dir) == os.path.abspath(DEFAULT_BUILD_DIR)) click.secho(msg, fg="yellow") except (UnsupportedRuntimeException, BuildError, UnsupportedBuilderLibraryVersionError, ContainerBuildNotSupported) as ex: click.secho("\nBuild Failed", fg="red") raise UserException(str(ex))
[ "def", "do_cli", "(", "function_identifier", ",", "# pylint: disable=too-many-locals", "template", ",", "base_dir", ",", "build_dir", ",", "clean", ",", "use_container", ",", "manifest_path", ",", "docker_network", ",", "skip_pull_image", ",", "parameter_overrides", ",", "mode", ")", ":", "LOG", ".", "debug", "(", "\"'build' command is called\"", ")", "if", "use_container", ":", "LOG", ".", "info", "(", "\"Starting Build inside a container\"", ")", "with", "BuildContext", "(", "function_identifier", ",", "template", ",", "base_dir", ",", "build_dir", ",", "clean", "=", "clean", ",", "manifest_path", "=", "manifest_path", ",", "use_container", "=", "use_container", ",", "parameter_overrides", "=", "parameter_overrides", ",", "docker_network", "=", "docker_network", ",", "skip_pull_image", "=", "skip_pull_image", ",", "mode", "=", "mode", ")", "as", "ctx", ":", "try", ":", "builder", "=", "ApplicationBuilder", "(", "ctx", ".", "functions_to_build", ",", "ctx", ".", "build_dir", ",", "ctx", ".", "base_dir", ",", "manifest_path_override", "=", "ctx", ".", "manifest_path_override", ",", "container_manager", "=", "ctx", ".", "container_manager", ",", "mode", "=", "ctx", ".", "mode", ")", "except", "FunctionNotFound", "as", "ex", ":", "raise", "UserException", "(", "str", "(", "ex", ")", ")", "try", ":", "artifacts", "=", "builder", ".", "build", "(", ")", "modified_template", "=", "builder", ".", "update_template", "(", "ctx", ".", "template_dict", ",", "ctx", ".", "original_template_path", ",", "artifacts", ")", "move_template", "(", "ctx", ".", "original_template_path", ",", "ctx", ".", "output_template_path", ",", "modified_template", ")", "click", ".", "secho", "(", "\"\\nBuild Succeeded\"", ",", "fg", "=", "\"green\"", ")", "msg", "=", "gen_success_msg", "(", "os", ".", "path", ".", "relpath", "(", "ctx", ".", "build_dir", ")", ",", "os", ".", "path", ".", "relpath", "(", "ctx", ".", "output_template_path", ")", ",", "os", ".", "path", ".", "abspath", "(", "ctx", ".", "build_dir", ")", "==", "os", ".", "path", ".", "abspath", "(", "DEFAULT_BUILD_DIR", ")", ")", "click", ".", "secho", "(", "msg", ",", "fg", "=", "\"yellow\"", ")", "except", "(", "UnsupportedRuntimeException", ",", "BuildError", ",", "UnsupportedBuilderLibraryVersionError", ",", "ContainerBuildNotSupported", ")", "as", "ex", ":", "click", ".", "secho", "(", "\"\\nBuild Failed\"", ",", "fg", "=", "\"red\"", ")", "raise", "UserException", "(", "str", "(", "ex", ")", ")" ]
Implementation of the ``cli`` method
[ "Implementation", "of", "the", "cli", "method" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/build/command.py#L106-L168
train
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/parser.py
SwaggerParser.get_apis
def get_apis(self): """ Parses a swagger document and returns a list of APIs configured in the document. Swagger documents have the following structure { "/path1": { # path "get": { # method "x-amazon-apigateway-integration": { # integration "type": "aws_proxy", # URI contains the Lambda function ARN that needs to be parsed to get Function Name "uri": { "Fn::Sub": "arn:aws:apigateway:aws:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/..." } } }, "post": { }, }, "/path2": { ... } } Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs that are configured in the Swagger document """ result = [] paths_dict = self.swagger.get("paths", {}) binary_media_types = self.get_binary_media_types() for full_path, path_config in paths_dict.items(): for method, method_config in path_config.items(): function_name = self._get_integration_function_name(method_config) if not function_name: LOG.debug("Lambda function integration not found in Swagger document at path='%s' method='%s'", full_path, method) continue if method.lower() == self._ANY_METHOD_EXTENSION_KEY: # Convert to a more commonly used method notation method = self._ANY_METHOD api = Api(path=full_path, method=method, function_name=function_name, cors=None, binary_media_types=binary_media_types) result.append(api) return result
python
def get_apis(self): """ Parses a swagger document and returns a list of APIs configured in the document. Swagger documents have the following structure { "/path1": { # path "get": { # method "x-amazon-apigateway-integration": { # integration "type": "aws_proxy", # URI contains the Lambda function ARN that needs to be parsed to get Function Name "uri": { "Fn::Sub": "arn:aws:apigateway:aws:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/..." } } }, "post": { }, }, "/path2": { ... } } Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs that are configured in the Swagger document """ result = [] paths_dict = self.swagger.get("paths", {}) binary_media_types = self.get_binary_media_types() for full_path, path_config in paths_dict.items(): for method, method_config in path_config.items(): function_name = self._get_integration_function_name(method_config) if not function_name: LOG.debug("Lambda function integration not found in Swagger document at path='%s' method='%s'", full_path, method) continue if method.lower() == self._ANY_METHOD_EXTENSION_KEY: # Convert to a more commonly used method notation method = self._ANY_METHOD api = Api(path=full_path, method=method, function_name=function_name, cors=None, binary_media_types=binary_media_types) result.append(api) return result
[ "def", "get_apis", "(", "self", ")", ":", "result", "=", "[", "]", "paths_dict", "=", "self", ".", "swagger", ".", "get", "(", "\"paths\"", ",", "{", "}", ")", "binary_media_types", "=", "self", ".", "get_binary_media_types", "(", ")", "for", "full_path", ",", "path_config", "in", "paths_dict", ".", "items", "(", ")", ":", "for", "method", ",", "method_config", "in", "path_config", ".", "items", "(", ")", ":", "function_name", "=", "self", ".", "_get_integration_function_name", "(", "method_config", ")", "if", "not", "function_name", ":", "LOG", ".", "debug", "(", "\"Lambda function integration not found in Swagger document at path='%s' method='%s'\"", ",", "full_path", ",", "method", ")", "continue", "if", "method", ".", "lower", "(", ")", "==", "self", ".", "_ANY_METHOD_EXTENSION_KEY", ":", "# Convert to a more commonly used method notation", "method", "=", "self", ".", "_ANY_METHOD", "api", "=", "Api", "(", "path", "=", "full_path", ",", "method", "=", "method", ",", "function_name", "=", "function_name", ",", "cors", "=", "None", ",", "binary_media_types", "=", "binary_media_types", ")", "result", ".", "append", "(", "api", ")", "return", "result" ]
Parses a swagger document and returns a list of APIs configured in the document. Swagger documents have the following structure { "/path1": { # path "get": { # method "x-amazon-apigateway-integration": { # integration "type": "aws_proxy", # URI contains the Lambda function ARN that needs to be parsed to get Function Name "uri": { "Fn::Sub": "arn:aws:apigateway:aws:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/..." } } }, "post": { }, }, "/path2": { ... } } Returns ------- list of samcli.commands.local.lib.provider.Api List of APIs that are configured in the Swagger document
[ "Parses", "a", "swagger", "document", "and", "returns", "a", "list", "of", "APIs", "configured", "in", "the", "document", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/parser.py#L38-L92
train
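To make the get_apis traversal above concrete, here is a minimal, self-contained sketch of the same paths/methods walk over a hand-written swagger dictionary. The swagger dict and the extract_function_name helper are illustrative stand-ins only; the real code delegates URI parsing to LambdaUri.get_function_name and builds Api objects rather than printing.

# Hypothetical swagger document; the ARN and function name are made up.
swagger = {
    "paths": {
        "/hello": {
            "get": {
                "x-amazon-apigateway-integration": {
                    "type": "aws_proxy",
                    "uri": ("arn:aws:apigateway:us-east-1:lambda:path/2015-03-31"
                            "/functions/arn:aws:lambda:us-east-1:123456789012"
                            ":function:HelloFn/invocations"),
                }
            }
        }
    }
}

def extract_function_name(method_config):
    # Stand-in for the real LambdaUri-based parsing: accept only aws_proxy.
    integration = method_config.get("x-amazon-apigateway-integration", {})
    if integration.get("type") != "aws_proxy":
        return None
    uri = integration.get("uri", "")
    return uri.split(":function:")[-1].split("/")[0] or None

for path, path_config in swagger["paths"].items():
    for method, method_config in path_config.items():
        name = extract_function_name(method_config)
        if name:
            print(path, method.upper(), "->", name)  # /hello GET -> HelloFn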
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/parser.py
SwaggerParser._get_integration_function_name
def _get_integration_function_name(self, method_config): """ Tries to parse the Lambda Function name from the Integration defined in the method configuration. Integration configuration is defined under the special "x-amazon-apigateway-integration" key. We care only about Lambda integrations, which are of type aws_proxy, and ignore the rest. Integration URI is complex and hard to parse. Hence we do our best to extract function name out of integration URI. If not possible, we return None. Parameters ---------- method_config : dict Dictionary containing the method configuration which might contain integration settings Returns ------- string or None Lambda function name, if possible. None, if not. """ if not isinstance(method_config, dict) or self._INTEGRATION_KEY not in method_config: return None integration = method_config[self._INTEGRATION_KEY] if integration \ and isinstance(integration, dict) \ and integration.get("type") == IntegrationType.aws_proxy.value: # Integration must be "aws_proxy" otherwise we don't care about it return LambdaUri.get_function_name(integration.get("uri"))
python
def _get_integration_function_name(self, method_config): """ Tries to parse the Lambda Function name from the Integration defined in the method configuration. Integration configuration is defined under the special "x-amazon-apigateway-integration" key. We care only about Lambda integrations, which are of type aws_proxy, and ignore the rest. Integration URI is complex and hard to parse. Hence we do our best to extract function name out of integration URI. If not possible, we return None. Parameters ---------- method_config : dict Dictionary containing the method configuration which might contain integration settings Returns ------- string or None Lambda function name, if possible. None, if not. """ if not isinstance(method_config, dict) or self._INTEGRATION_KEY not in method_config: return None integration = method_config[self._INTEGRATION_KEY] if integration \ and isinstance(integration, dict) \ and integration.get("type") == IntegrationType.aws_proxy.value: # Integration must be "aws_proxy" otherwise we don't care about it return LambdaUri.get_function_name(integration.get("uri"))
[ "def", "_get_integration_function_name", "(", "self", ",", "method_config", ")", ":", "if", "not", "isinstance", "(", "method_config", ",", "dict", ")", "or", "self", ".", "_INTEGRATION_KEY", "not", "in", "method_config", ":", "return", "None", "integration", "=", "method_config", "[", "self", ".", "_INTEGRATION_KEY", "]", "if", "integration", "and", "isinstance", "(", "integration", ",", "dict", ")", "and", "integration", ".", "get", "(", "\"type\"", ")", "==", "IntegrationType", ".", "aws_proxy", ".", "value", ":", "# Integration must be \"aws_proxy\" otherwise we don't care about it", "return", "LambdaUri", ".", "get_function_name", "(", "integration", ".", "get", "(", "\"uri\"", ")", ")" ]
Tries to parse the Lambda Function name from the Integration defined in the method configuration. Integration configuration is defined under the special "x-amazon-apigateway-integration" key. We care only about Lambda integrations, which are of type aws_proxy, and ignore the rest. Integration URI is complex and hard to parse. Hence we do our best to extract function name out of integration URI. If not possible, we return None. Parameters ---------- method_config : dict Dictionary containing the method configuration which might contain integration settings Returns ------- string or None Lambda function name, if possible. None, if not.
[ "Tries", "to", "parse", "the", "Lambda", "Function", "name", "from", "the", "Integration", "defined", "in", "the", "method", "configuration", ".", "Integration", "configuration", "is", "defined", "under", "the", "special", "x", "-", "amazon", "-", "apigateway", "-", "integration", "key", ".", "We", "care", "only", "about", "Lambda", "integrations", "which", "are", "of", "type", "aws_proxy", "and", "ignore", "the", "rest", ".", "Integration", "URI", "is", "complex", "and", "hard", "to", "parse", ".", "Hence", "we", "do", "our", "best", "to", "extract", "function", "name", "out", "of", "integration", "URI", ".", "If", "not", "possible", "we", "return", "None", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/parser.py#L94-L121
train
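A quick sketch of the two guard conditions described above, using hypothetical method_config values. Only a dict that carries the integration key with type "aws_proxy" yields a URI; everything else falls through to None, mirroring the early returns in _get_integration_function_name.

INTEGRATION_KEY = "x-amazon-apigateway-integration"

configs = [
    {INTEGRATION_KEY: {"type": "aws_proxy", "uri": "arn:aws:lambda:...:function:Hello"}},
    {INTEGRATION_KEY: {"type": "http", "uri": "https://example.com"}},
    "not-a-dict",  # e.g. a malformed or unexpected swagger extension value
]

for cfg in configs:
    if not isinstance(cfg, dict) or INTEGRATION_KEY not in cfg:
        print(None)  # fails the first guard
        continue
    integration = cfg[INTEGRATION_KEY]
    # Second guard: integration must be an aws_proxy Lambda integration
    print(integration.get("uri") if integration.get("type") == "aws_proxy" else None)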
awslabs/aws-sam-cli
samcli/commands/local/start_lambda/cli.py
do_cli
def do_cli(ctx, host, port, template, env_vars, debug_port, debug_args, # pylint: disable=R0914 debugger_path, docker_volume_basedir, docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, parameter_overrides): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ LOG.debug("local start_lambda command is called") # Pass all inputs to setup necessary context to invoke function locally. # Handler exception raised by the processor for invalid args and print errors try: with InvokeContext(template_file=template, function_identifier=None, # Don't scope to one particular function env_vars_file=env_vars, docker_volume_basedir=docker_volume_basedir, docker_network=docker_network, log_file=log_file, skip_pull_image=skip_pull_image, debug_port=debug_port, debug_args=debug_args, debugger_path=debugger_path, parameter_overrides=parameter_overrides, layer_cache_basedir=layer_cache_basedir, force_image_build=force_image_build, aws_region=ctx.region) as invoke_context: service = LocalLambdaService(lambda_invoke_context=invoke_context, port=port, host=host) service.start() except (InvalidSamDocumentException, OverridesNotWellDefinedError, InvalidLayerReference, DebuggingNotSupported) as ex: raise UserException(str(ex))
python
def do_cli(ctx, host, port, template, env_vars, debug_port, debug_args, # pylint: disable=R0914 debugger_path, docker_volume_basedir, docker_network, log_file, layer_cache_basedir, skip_pull_image, force_image_build, parameter_overrides): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ LOG.debug("local start_lambda command is called") # Pass all inputs to setup necessary context to invoke function locally. # Handler exception raised by the processor for invalid args and print errors try: with InvokeContext(template_file=template, function_identifier=None, # Don't scope to one particular function env_vars_file=env_vars, docker_volume_basedir=docker_volume_basedir, docker_network=docker_network, log_file=log_file, skip_pull_image=skip_pull_image, debug_port=debug_port, debug_args=debug_args, debugger_path=debugger_path, parameter_overrides=parameter_overrides, layer_cache_basedir=layer_cache_basedir, force_image_build=force_image_build, aws_region=ctx.region) as invoke_context: service = LocalLambdaService(lambda_invoke_context=invoke_context, port=port, host=host) service.start() except (InvalidSamDocumentException, OverridesNotWellDefinedError, InvalidLayerReference, DebuggingNotSupported) as ex: raise UserException(str(ex))
[ "def", "do_cli", "(", "ctx", ",", "host", ",", "port", ",", "template", ",", "env_vars", ",", "debug_port", ",", "debug_args", ",", "# pylint: disable=R0914", "debugger_path", ",", "docker_volume_basedir", ",", "docker_network", ",", "log_file", ",", "layer_cache_basedir", ",", "skip_pull_image", ",", "force_image_build", ",", "parameter_overrides", ")", ":", "LOG", ".", "debug", "(", "\"local start_lambda command is called\"", ")", "# Pass all inputs to setup necessary context to invoke function locally.", "# Handler exception raised by the processor for invalid args and print errors", "try", ":", "with", "InvokeContext", "(", "template_file", "=", "template", ",", "function_identifier", "=", "None", ",", "# Don't scope to one particular function", "env_vars_file", "=", "env_vars", ",", "docker_volume_basedir", "=", "docker_volume_basedir", ",", "docker_network", "=", "docker_network", ",", "log_file", "=", "log_file", ",", "skip_pull_image", "=", "skip_pull_image", ",", "debug_port", "=", "debug_port", ",", "debug_args", "=", "debug_args", ",", "debugger_path", "=", "debugger_path", ",", "parameter_overrides", "=", "parameter_overrides", ",", "layer_cache_basedir", "=", "layer_cache_basedir", ",", "force_image_build", "=", "force_image_build", ",", "aws_region", "=", "ctx", ".", "region", ")", "as", "invoke_context", ":", "service", "=", "LocalLambdaService", "(", "lambda_invoke_context", "=", "invoke_context", ",", "port", "=", "port", ",", "host", "=", "host", ")", "service", ".", "start", "(", ")", "except", "(", "InvalidSamDocumentException", ",", "OverridesNotWellDefinedError", ",", "InvalidLayerReference", ",", "DebuggingNotSupported", ")", "as", "ex", ":", "raise", "UserException", "(", "str", "(", "ex", ")", ")" ]
Implementation of the ``cli`` method, just separated out for unit testing purposes
[ "Implementation", "of", "the", "cli", "method", "just", "separated", "out", "for", "unit", "testing", "purposes" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/start_lambda/cli.py#L76-L113
train
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
LogsFormatter.do_format
def do_format(self, event_iterable): """ Formats the given CloudWatch Logs Event dictionary as necessary and returns an iterable that will return the formatted string. This can be used to parse and format the events based on context ie. In Lambda Function logs, a formatter may wish to color the "ERROR" keywords red, or highlight a filter keyword separately etc. This method takes an iterable as input and returns an iterable. It does not immediately format the event. Instead, it sets up the formatter chain appropriately and returns the iterable. Actual formatting happens only when the iterable is used by the caller. Parameters ---------- event_iterable : iterable of samcli.lib.logs.event.LogEvent Iterable that returns an object containing information about each log event. Returns ------- iterable of string Iterable that returns a formatted event as a string. """ for operation in self.formatter_chain: # Make sure the operation has access to certain basic objects like colored partial_op = functools.partial(operation, colored=self.colored) event_iterable = imap(partial_op, event_iterable) return event_iterable
python
def do_format(self, event_iterable): """ Formats the given CloudWatch Logs Event dictionary as necessary and returns an iterable that will return the formatted string. This can be used to parse and format the events based on context ie. In Lambda Function logs, a formatter may wish to color the "ERROR" keywords red, or highlight a filter keyword separately etc. This method takes an iterable as input and returns an iterable. It does not immediately format the event. Instead, it sets up the formatter chain appropriately and returns the iterable. Actual formatting happens only when the iterable is used by the caller. Parameters ---------- event_iterable : iterable of samcli.lib.logs.event.LogEvent Iterable that returns an object containing information about each log event. Returns ------- iterable of string Iterable that returns a formatted event as a string. """ for operation in self.formatter_chain: # Make sure the operation has access to certain basic objects like colored partial_op = functools.partial(operation, colored=self.colored) event_iterable = imap(partial_op, event_iterable) return event_iterable
[ "def", "do_format", "(", "self", ",", "event_iterable", ")", ":", "for", "operation", "in", "self", ".", "formatter_chain", ":", "# Make sure the operation has access to certain basic objects like colored", "partial_op", "=", "functools", ".", "partial", "(", "operation", ",", "colored", "=", "self", ".", "colored", ")", "event_iterable", "=", "imap", "(", "partial_op", ",", "event_iterable", ")", "return", "event_iterable" ]
Formats the given CloudWatch Logs Event dictionary as necessary and returns an iterable that will return the formatted string. This can be used to parse and format the events based on context, e.g. in Lambda Function logs, a formatter may wish to color the "ERROR" keywords red, or highlight a filter keyword separately, etc. This method takes an iterable as input and returns an iterable. It does not immediately format the event. Instead, it sets up the formatter chain appropriately and returns the iterable. Actual formatting happens only when the iterable is used by the caller. Parameters ---------- event_iterable : iterable of samcli.lib.logs.event.LogEvent Iterable that returns an object containing information about each log event. Returns ------- iterable of string Iterable that returns a formatted event as a string.
[ "Formats", "the", "given", "CloudWatch", "Logs", "Event", "dictionary", "as", "necessary", "and", "returns", "an", "iterable", "that", "will", "return", "the", "formatted", "string", ".", "This", "can", "be", "used", "to", "parse", "and", "format", "the", "events", "based", "on", "context", "ie", ".", "In", "Lambda", "Function", "logs", "a", "formatter", "may", "wish", "to", "color", "the", "ERROR", "keywords", "red", "or", "highlight", "a", "filter", "keyword", "separately", "etc", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L83-L111
train
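The laziness described in this docstring is worth seeing in isolation. Below is a minimal sketch of the same pattern with stand-in operations: each formatter is partially applied with shared context (here a plain prefix string instead of a colored object), then mapped over the stream. In Python 3, the built-in map is lazy, playing the role of itertools.imap from Python 2.

import functools

def add_prefix(event, colored):
    return colored + event

def shout(event, colored):  # ignores the shared context, like some formatters
    return event.upper()

formatter_chain = [add_prefix, shout]
events = iter(["started", "done"])

for operation in formatter_chain:
    partial_op = functools.partial(operation, colored=">> ")
    events = map(partial_op, events)  # nothing is formatted yet

print(list(events))  # ['>> STARTED', '>> DONE'] -- formatting happens here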
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
LogsFormatter._pretty_print_event
def _pretty_print_event(event, colored): """ Basic formatter to convert an event object to string """ event.timestamp = colored.yellow(event.timestamp) event.log_stream_name = colored.cyan(event.log_stream_name) return ' '.join([event.log_stream_name, event.timestamp, event.message])
python
def _pretty_print_event(event, colored): """ Basic formatter to convert an event object to string """ event.timestamp = colored.yellow(event.timestamp) event.log_stream_name = colored.cyan(event.log_stream_name) return ' '.join([event.log_stream_name, event.timestamp, event.message])
[ "def", "_pretty_print_event", "(", "event", ",", "colored", ")", ":", "event", ".", "timestamp", "=", "colored", ".", "yellow", "(", "event", ".", "timestamp", ")", "event", ".", "log_stream_name", "=", "colored", ".", "cyan", "(", "event", ".", "log_stream_name", ")", "return", "' '", ".", "join", "(", "[", "event", ".", "log_stream_name", ",", "event", ".", "timestamp", ",", "event", ".", "message", "]", ")" ]
Basic formatter to convert an event object to string
[ "Basic", "formatter", "to", "convert", "an", "event", "object", "to", "string" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L114-L121
train
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
LambdaLogMsgFormatters.colorize_errors
def colorize_errors(event, colored): """ Highlights some commonly known Lambda error cases in red: - Nodejs process crashes - Lambda function timeouts """ nodejs_crash_msg = "Process exited before completing request" timeout_msg = "Task timed out" if nodejs_crash_msg in event.message \ or timeout_msg in event.message: event.message = colored.red(event.message) return event
python
def colorize_errors(event, colored): """ Highlights some commonly known Lambda error cases in red: - Nodejs process crashes - Lambda function timeouts """ nodejs_crash_msg = "Process exited before completing request" timeout_msg = "Task timed out" if nodejs_crash_msg in event.message \ or timeout_msg in event.message: event.message = colored.red(event.message) return event
[ "def", "colorize_errors", "(", "event", ",", "colored", ")", ":", "nodejs_crash_msg", "=", "\"Process exited before completing request\"", "timeout_msg", "=", "\"Task timed out\"", "if", "nodejs_crash_msg", "in", "event", ".", "message", "or", "timeout_msg", "in", "event", ".", "message", ":", "event", ".", "message", "=", "colored", ".", "red", "(", "event", ".", "message", ")", "return", "event" ]
Highlights some commonly known Lambda error cases in red: - Nodejs process crashes - Lambda function timeouts
[ "Highlights", "some", "commonly", "known", "Lambda", "error", "cases", "in", "red", ":", "-", "Nodejs", "process", "crashes", "-", "Lambda", "function", "timeouts" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L132-L146
train
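As a rough illustration of the substring matching above, here is a dependency-free sketch that swaps the colored.red call for a raw ANSI escape; the error marker strings are copied from the function, the rest is illustrative.

KNOWN_ERRORS = ("Process exited before completing request", "Task timed out")

def maybe_red(message):
    # ANSI red stands in for colored.red in this sketch
    if any(marker in message for marker in KNOWN_ERRORS):
        return "\033[31m" + message + "\033[0m"
    return message

print(maybe_red("Task timed out after 3.00 seconds"))   # printed in red
print(maybe_red("Billed Duration: 100 ms"))             # unchanged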
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
KeywordHighlighter.highlight_keywords
def highlight_keywords(self, event, colored): """ Highlight the keyword in the log statement by drawing an underline """ if self.keyword: highlight = colored.underline(self.keyword) event.message = event.message.replace(self.keyword, highlight) return event
python
def highlight_keywords(self, event, colored): """ Highlight the keyword in the log statement by drawing an underline """ if self.keyword: highlight = colored.underline(self.keyword) event.message = event.message.replace(self.keyword, highlight) return event
[ "def", "highlight_keywords", "(", "self", ",", "event", ",", "colored", ")", ":", "if", "self", ".", "keyword", ":", "highlight", "=", "colored", ".", "underline", "(", "self", ".", "keyword", ")", "event", ".", "message", "=", "event", ".", "message", ".", "replace", "(", "self", ".", "keyword", ",", "highlight", ")", "return", "event" ]
Highlight the keyword in the log statement by drawing an underline
[ "Highlight", "the", "keyword", "in", "the", "log", "statement", "by", "drawing", "an", "underline" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L157-L165
train
awslabs/aws-sam-cli
samcli/lib/logs/formatter.py
JSONMsgFormatter.format_json
def format_json(event, colored): """ If the event message is a JSON string, then pretty print the JSON with 2 indents and sort the keys. This makes it very easy to visually parse and search JSON data """ try: if event.message.startswith("{"): msg_dict = json.loads(event.message) event.message = json.dumps(msg_dict, indent=2) except Exception: # Skip if the event message was not JSON pass return event
python
def format_json(event, colored): """ If the event message is a JSON string, then pretty print the JSON with 2 indents and sort the keys. This makes it very easy to visually parse and search JSON data """ try: if event.message.startswith("{"): msg_dict = json.loads(event.message) event.message = json.dumps(msg_dict, indent=2) except Exception: # Skip if the event message was not JSON pass return event
[ "def", "format_json", "(", "event", ",", "colored", ")", ":", "try", ":", "if", "event", ".", "message", ".", "startswith", "(", "\"{\"", ")", ":", "msg_dict", "=", "json", ".", "loads", "(", "event", ".", "message", ")", "event", ".", "message", "=", "json", ".", "dumps", "(", "msg_dict", ",", "indent", "=", "2", ")", "except", "Exception", ":", "# Skip if the event message was not JSON", "pass", "return", "event" ]
If the event message is a JSON string, then pretty-print the JSON with an indent of 2. This makes it very easy to visually parse and search JSON data
[ "If", "the", "event", "message", "is", "a", "JSON", "string", "then", "pretty", "print", "the", "JSON", "with", "2", "indents", "and", "sort", "the", "keys", ".", "This", "makes", "it", "very", "easy", "to", "visually", "parse", "and", "search", "JSON", "data" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/formatter.py#L174-L188
train
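The try/except-guarded round-trip above is easy to replicate standalone. A sketch, assuming only the standard library; note that messages that merely start with "{" but fail json.loads fall through unchanged, exactly because the exception is swallowed.

import json

def pretty(message):
    try:
        if message.startswith("{"):
            return json.dumps(json.loads(message), indent=2)
    except Exception:
        pass  # not valid JSON after all; keep the original message
    return message

print(pretty('{"level": "INFO", "msg": "hello"}'))  # re-emitted with 2-space indent
print(pretty("{not json"))                          # returned as-is
print(pretty("plain text log line"))                # returned as-is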
awslabs/aws-sam-cli
samcli/commands/_utils/template.py
get_template_data
def get_template_data(template_file): """ Read the template file, parse it as JSON/YAML and return the template as a dictionary. Parameters ---------- template_file : string Path to the template to read Returns ------- Template data as a dictionary """ if not pathlib.Path(template_file).exists(): raise ValueError("Template file not found at {}".format(template_file)) with open(template_file, 'r') as fp: try: return yaml_parse(fp.read()) except (ValueError, yaml.YAMLError) as ex: raise ValueError("Failed to parse template: {}".format(str(ex)))
python
def get_template_data(template_file): """ Read the template file, parse it as JSON/YAML and return the template as a dictionary. Parameters ---------- template_file : string Path to the template to read Returns ------- Template data as a dictionary """ if not pathlib.Path(template_file).exists(): raise ValueError("Template file not found at {}".format(template_file)) with open(template_file, 'r') as fp: try: return yaml_parse(fp.read()) except (ValueError, yaml.YAMLError) as ex: raise ValueError("Failed to parse template: {}".format(str(ex)))
[ "def", "get_template_data", "(", "template_file", ")", ":", "if", "not", "pathlib", ".", "Path", "(", "template_file", ")", ".", "exists", "(", ")", ":", "raise", "ValueError", "(", "\"Template file not found at {}\"", ".", "format", "(", "template_file", ")", ")", "with", "open", "(", "template_file", ",", "'r'", ")", "as", "fp", ":", "try", ":", "return", "yaml_parse", "(", "fp", ".", "read", "(", ")", ")", "except", "(", "ValueError", ",", "yaml", ".", "YAMLError", ")", "as", "ex", ":", "raise", "ValueError", "(", "\"Failed to parse template: {}\"", ".", "format", "(", "str", "(", "ex", ")", ")", ")" ]
Read the template file, parse it as JSON/YAML and return the template as a dictionary. Parameters ---------- template_file : string Path to the template to read Returns ------- Template data as a dictionary
[ "Read", "the", "template", "file", "parse", "it", "as", "JSON", "/", "YAML", "and", "return", "the", "template", "as", "a", "dictionary", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/template.py#L36-L57
train
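A small end-to-end sketch of this load-and-parse flow, writing a throwaway template to a temporary file first. It assumes PyYAML is installed and uses yaml.safe_load as a stand-in for samcli's yaml_parse, which additionally understands CloudFormation short-form tags such as !Ref that safe_load would reject.

import tempfile
import yaml  # assumes PyYAML is available

template_text = """
Resources:
  HelloFunction:
    Type: AWS::Serverless::Function
    Properties:
      CodeUri: hello/
"""

with tempfile.NamedTemporaryFile("w", suffix=".yaml", delete=False) as fp:
    fp.write(template_text)
    path = fp.name

with open(path, "r") as fp:
    data = yaml.safe_load(fp.read())

print(data["Resources"]["HelloFunction"]["Properties"]["CodeUri"])  # hello/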
awslabs/aws-sam-cli
samcli/commands/_utils/template.py
move_template
def move_template(src_template_path, dest_template_path, template_dict): """ Move the SAM/CloudFormation template from ``src_template_path`` to ``dest_template_path``. For convenience, this method accepts a dictionary of template data ``template_dict`` that will be written to the destination instead of reading from the source file. SAM/CloudFormation template can contain certain properties whose value is a relative path to a local file/folder. This path is always relative to the template's location. Before writing the template to ``dest_template_path`, we will update these paths to be relative to the new location. This methods updates resource properties supported by ``aws cloudformation package`` command: https://docs.aws.amazon.com/cli/latest/reference/cloudformation/package.html You must use this method if you are reading a template from one location, modifying it, and writing it back to a different location. Parameters ---------- src_template_path : str Path to the original location of the template dest_template_path : str Path to the destination location where updated template should be written to template_dict : dict Dictionary containing template contents. This dictionary will be updated & written to ``dest`` location. """ original_root = os.path.dirname(src_template_path) new_root = os.path.dirname(dest_template_path) # Next up, we will be writing the template to a different location. Before doing so, we should # update any relative paths in the template to be relative to the new location. modified_template = _update_relative_paths(template_dict, original_root, new_root) with open(dest_template_path, "w") as fp: fp.write(yaml_dump(modified_template))
python
def move_template(src_template_path, dest_template_path, template_dict): """ Move the SAM/CloudFormation template from ``src_template_path`` to ``dest_template_path``. For convenience, this method accepts a dictionary of template data ``template_dict`` that will be written to the destination instead of reading from the source file. SAM/CloudFormation template can contain certain properties whose value is a relative path to a local file/folder. This path is always relative to the template's location. Before writing the template to ``dest_template_path`, we will update these paths to be relative to the new location. This methods updates resource properties supported by ``aws cloudformation package`` command: https://docs.aws.amazon.com/cli/latest/reference/cloudformation/package.html You must use this method if you are reading a template from one location, modifying it, and writing it back to a different location. Parameters ---------- src_template_path : str Path to the original location of the template dest_template_path : str Path to the destination location where updated template should be written to template_dict : dict Dictionary containing template contents. This dictionary will be updated & written to ``dest`` location. """ original_root = os.path.dirname(src_template_path) new_root = os.path.dirname(dest_template_path) # Next up, we will be writing the template to a different location. Before doing so, we should # update any relative paths in the template to be relative to the new location. modified_template = _update_relative_paths(template_dict, original_root, new_root) with open(dest_template_path, "w") as fp: fp.write(yaml_dump(modified_template))
[ "def", "move_template", "(", "src_template_path", ",", "dest_template_path", ",", "template_dict", ")", ":", "original_root", "=", "os", ".", "path", ".", "dirname", "(", "src_template_path", ")", "new_root", "=", "os", ".", "path", ".", "dirname", "(", "dest_template_path", ")", "# Next up, we will be writing the template to a different location. Before doing so, we should", "# update any relative paths in the template to be relative to the new location.", "modified_template", "=", "_update_relative_paths", "(", "template_dict", ",", "original_root", ",", "new_root", ")", "with", "open", "(", "dest_template_path", ",", "\"w\"", ")", "as", "fp", ":", "fp", ".", "write", "(", "yaml_dump", "(", "modified_template", ")", ")" ]
Move the SAM/CloudFormation template from ``src_template_path`` to ``dest_template_path``. For convenience, this method accepts a dictionary of template data ``template_dict`` that will be written to the destination instead of reading from the source file. A SAM/CloudFormation template can contain certain properties whose value is a relative path to a local file/folder. This path is always relative to the template's location. Before writing the template to ``dest_template_path``, we will update these paths to be relative to the new location. This method updates resource properties supported by the ``aws cloudformation package`` command: https://docs.aws.amazon.com/cli/latest/reference/cloudformation/package.html You must use this method if you are reading a template from one location, modifying it, and writing it back to a different location. Parameters ---------- src_template_path : str Path to the original location of the template dest_template_path : str Path to the destination location where the updated template should be written template_dict : dict Dictionary containing template contents. This dictionary will be updated & written to the ``dest`` location.
[ "Move", "the", "SAM", "/", "CloudFormation", "template", "from", "src_template_path", "to", "dest_template_path", ".", "For", "convenience", "this", "method", "accepts", "a", "dictionary", "of", "template", "data", "template_dict", "that", "will", "be", "written", "to", "the", "destination", "instead", "of", "reading", "from", "the", "source", "file", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/template.py#L60-L100
train
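The reason the rewrite is needed is easiest to see with a worked path example. Assuming a hypothetical project layout where the template moves from the project root into a build directory, a CodeUri of "hello" must be rebased:

import os

original_root = "project"                                # template lived here
new_root = os.path.join("project", ".aws-sam", "build")  # template moves here
code_uri = "hello"

absolute = os.path.normpath(os.path.join(original_root, code_uri))
rewritten = os.path.relpath(absolute, new_root)
print(rewritten)  # ../../hello (..\..\hello on Windows)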
awslabs/aws-sam-cli
samcli/commands/_utils/template.py
_update_relative_paths
def _update_relative_paths(template_dict, original_root, new_root): """ SAM/CloudFormation template can contain certain properties whose value is a relative path to a local file/folder. This path is usually relative to the template's location. If the template is being moved from original location ``original_root`` to new location ``new_root``, use this method to update these paths to be relative to ``new_root``. After this method is complete, it is safe to write the template to ``new_root`` without breaking any relative paths. This methods updates resource properties supported by ``aws cloudformation package`` command: https://docs.aws.amazon.com/cli/latest/reference/cloudformation/package.html If a property is either an absolute path or a S3 URI, this method will not update them. Parameters ---------- template_dict : dict Dictionary containing template contents. This dictionary will be updated & written to ``dest`` location. original_root : str Path to the directory where all paths were originally set relative to. This is usually the directory containing the template originally new_root : str Path to the new directory that all paths set relative to after this method completes. Returns ------- Updated dictionary """ for resource_type, properties in template_dict.get("Metadata", {}).items(): if resource_type not in _METADATA_WITH_LOCAL_PATHS: # Unknown resource. Skipping continue for path_prop_name in _METADATA_WITH_LOCAL_PATHS[resource_type]: path = properties.get(path_prop_name) updated_path = _resolve_relative_to(path, original_root, new_root) if not updated_path: # This path does not need to get updated continue properties[path_prop_name] = updated_path for _, resource in template_dict.get("Resources", {}).items(): resource_type = resource.get("Type") if resource_type not in _RESOURCES_WITH_LOCAL_PATHS: # Unknown resource. Skipping continue for path_prop_name in _RESOURCES_WITH_LOCAL_PATHS[resource_type]: properties = resource.get("Properties", {}) path = properties.get(path_prop_name) updated_path = _resolve_relative_to(path, original_root, new_root) if not updated_path: # This path does not need to get updated continue properties[path_prop_name] = updated_path # AWS::Includes can be anywhere within the template dictionary. Hence we need to recurse through the # dictionary in a separate method to find and update relative paths in there template_dict = _update_aws_include_relative_path(template_dict, original_root, new_root) return template_dict
python
def _update_relative_paths(template_dict, original_root, new_root): """ SAM/CloudFormation template can contain certain properties whose value is a relative path to a local file/folder. This path is usually relative to the template's location. If the template is being moved from original location ``original_root`` to new location ``new_root``, use this method to update these paths to be relative to ``new_root``. After this method is complete, it is safe to write the template to ``new_root`` without breaking any relative paths. This methods updates resource properties supported by ``aws cloudformation package`` command: https://docs.aws.amazon.com/cli/latest/reference/cloudformation/package.html If a property is either an absolute path or a S3 URI, this method will not update them. Parameters ---------- template_dict : dict Dictionary containing template contents. This dictionary will be updated & written to ``dest`` location. original_root : str Path to the directory where all paths were originally set relative to. This is usually the directory containing the template originally new_root : str Path to the new directory that all paths set relative to after this method completes. Returns ------- Updated dictionary """ for resource_type, properties in template_dict.get("Metadata", {}).items(): if resource_type not in _METADATA_WITH_LOCAL_PATHS: # Unknown resource. Skipping continue for path_prop_name in _METADATA_WITH_LOCAL_PATHS[resource_type]: path = properties.get(path_prop_name) updated_path = _resolve_relative_to(path, original_root, new_root) if not updated_path: # This path does not need to get updated continue properties[path_prop_name] = updated_path for _, resource in template_dict.get("Resources", {}).items(): resource_type = resource.get("Type") if resource_type not in _RESOURCES_WITH_LOCAL_PATHS: # Unknown resource. Skipping continue for path_prop_name in _RESOURCES_WITH_LOCAL_PATHS[resource_type]: properties = resource.get("Properties", {}) path = properties.get(path_prop_name) updated_path = _resolve_relative_to(path, original_root, new_root) if not updated_path: # This path does not need to get updated continue properties[path_prop_name] = updated_path # AWS::Includes can be anywhere within the template dictionary. Hence we need to recurse through the # dictionary in a separate method to find and update relative paths in there template_dict = _update_aws_include_relative_path(template_dict, original_root, new_root) return template_dict
[ "def", "_update_relative_paths", "(", "template_dict", ",", "original_root", ",", "new_root", ")", ":", "for", "resource_type", ",", "properties", "in", "template_dict", ".", "get", "(", "\"Metadata\"", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "resource_type", "not", "in", "_METADATA_WITH_LOCAL_PATHS", ":", "# Unknown resource. Skipping", "continue", "for", "path_prop_name", "in", "_METADATA_WITH_LOCAL_PATHS", "[", "resource_type", "]", ":", "path", "=", "properties", ".", "get", "(", "path_prop_name", ")", "updated_path", "=", "_resolve_relative_to", "(", "path", ",", "original_root", ",", "new_root", ")", "if", "not", "updated_path", ":", "# This path does not need to get updated", "continue", "properties", "[", "path_prop_name", "]", "=", "updated_path", "for", "_", ",", "resource", "in", "template_dict", ".", "get", "(", "\"Resources\"", ",", "{", "}", ")", ".", "items", "(", ")", ":", "resource_type", "=", "resource", ".", "get", "(", "\"Type\"", ")", "if", "resource_type", "not", "in", "_RESOURCES_WITH_LOCAL_PATHS", ":", "# Unknown resource. Skipping", "continue", "for", "path_prop_name", "in", "_RESOURCES_WITH_LOCAL_PATHS", "[", "resource_type", "]", ":", "properties", "=", "resource", ".", "get", "(", "\"Properties\"", ",", "{", "}", ")", "path", "=", "properties", ".", "get", "(", "path_prop_name", ")", "updated_path", "=", "_resolve_relative_to", "(", "path", ",", "original_root", ",", "new_root", ")", "if", "not", "updated_path", ":", "# This path does not need to get updated", "continue", "properties", "[", "path_prop_name", "]", "=", "updated_path", "# AWS::Includes can be anywhere within the template dictionary. Hence we need to recurse through the", "# dictionary in a separate method to find and update relative paths in there", "template_dict", "=", "_update_aws_include_relative_path", "(", "template_dict", ",", "original_root", ",", "new_root", ")", "return", "template_dict" ]
A SAM/CloudFormation template can contain certain properties whose value is a relative path to a local file/folder. This path is usually relative to the template's location. If the template is being moved from its original location ``original_root`` to a new location ``new_root``, use this method to update these paths to be relative to ``new_root``. After this method is complete, it is safe to write the template to ``new_root`` without breaking any relative paths. This method updates resource properties supported by the ``aws cloudformation package`` command: https://docs.aws.amazon.com/cli/latest/reference/cloudformation/package.html If a property is either an absolute path or an S3 URI, this method will not update it. Parameters ---------- template_dict : dict Dictionary containing template contents. This dictionary will be updated & written to the ``dest`` location. original_root : str Path to the directory where all paths were originally set relative to. This is usually the directory containing the template originally new_root : str Path to the new directory that all paths will be set relative to after this method completes. Returns ------- Updated dictionary
[ "SAM", "/", "CloudFormation", "template", "can", "contain", "certain", "properties", "whose", "value", "is", "a", "relative", "path", "to", "a", "local", "file", "/", "folder", ".", "This", "path", "is", "usually", "relative", "to", "the", "template", "s", "location", ".", "If", "the", "template", "is", "being", "moved", "from", "original", "location", "original_root", "to", "new", "location", "new_root", "use", "this", "method", "to", "update", "these", "paths", "to", "be", "relative", "to", "new_root", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/template.py#L103-L177
train
awslabs/aws-sam-cli
samcli/commands/_utils/template.py
_update_aws_include_relative_path
def _update_aws_include_relative_path(template_dict, original_root, new_root): """ Update relative paths in "AWS::Include" directive. This directive can be present at any part of the template, and not just within resources. """ for key, val in template_dict.items(): if key == "Fn::Transform": if isinstance(val, dict) and val.get("Name") == "AWS::Include": path = val.get("Parameters", {}).get("Location", {}) updated_path = _resolve_relative_to(path, original_root, new_root) if not updated_path: # This path does not need to get updated continue val["Parameters"]["Location"] = updated_path # Recurse through all dictionary values elif isinstance(val, dict): _update_aws_include_relative_path(val, original_root, new_root) elif isinstance(val, list): for item in val: if isinstance(item, dict): _update_aws_include_relative_path(item, original_root, new_root) return template_dict
python
def _update_aws_include_relative_path(template_dict, original_root, new_root): """ Update relative paths in "AWS::Include" directive. This directive can be present at any part of the template, and not just within resources. """ for key, val in template_dict.items(): if key == "Fn::Transform": if isinstance(val, dict) and val.get("Name") == "AWS::Include": path = val.get("Parameters", {}).get("Location", {}) updated_path = _resolve_relative_to(path, original_root, new_root) if not updated_path: # This path does not need to get updated continue val["Parameters"]["Location"] = updated_path # Recurse through all dictionary values elif isinstance(val, dict): _update_aws_include_relative_path(val, original_root, new_root) elif isinstance(val, list): for item in val: if isinstance(item, dict): _update_aws_include_relative_path(item, original_root, new_root) return template_dict
[ "def", "_update_aws_include_relative_path", "(", "template_dict", ",", "original_root", ",", "new_root", ")", ":", "for", "key", ",", "val", "in", "template_dict", ".", "items", "(", ")", ":", "if", "key", "==", "\"Fn::Transform\"", ":", "if", "isinstance", "(", "val", ",", "dict", ")", "and", "val", ".", "get", "(", "\"Name\"", ")", "==", "\"AWS::Include\"", ":", "path", "=", "val", ".", "get", "(", "\"Parameters\"", ",", "{", "}", ")", ".", "get", "(", "\"Location\"", ",", "{", "}", ")", "updated_path", "=", "_resolve_relative_to", "(", "path", ",", "original_root", ",", "new_root", ")", "if", "not", "updated_path", ":", "# This path does not need to get updated", "continue", "val", "[", "\"Parameters\"", "]", "[", "\"Location\"", "]", "=", "updated_path", "# Recurse through all dictionary values", "elif", "isinstance", "(", "val", ",", "dict", ")", ":", "_update_aws_include_relative_path", "(", "val", ",", "original_root", ",", "new_root", ")", "elif", "isinstance", "(", "val", ",", "list", ")", ":", "for", "item", "in", "val", ":", "if", "isinstance", "(", "item", ",", "dict", ")", ":", "_update_aws_include_relative_path", "(", "item", ",", "original_root", ",", "new_root", ")", "return", "template_dict" ]
Update relative paths in the "AWS::Include" directive. This directive can be present in any part of the template, not just within resources.
[ "Update", "relative", "paths", "in", "AWS", "::", "Include", "directive", ".", "This", "directive", "can", "be", "present", "at", "any", "part", "of", "the", "template", "and", "not", "just", "within", "resources", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/template.py#L180-L205
train
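Since Fn::Transform/AWS::Include nodes can be nested arbitrarily deep, the function above recurses through both dicts and lists. Here is a standalone sketch of the same walk that merely collects the Location values rather than rewriting them; the template dict is a made-up example.

def find_include_locations(node, found):
    if isinstance(node, dict):
        for key, val in node.items():
            if key == "Fn::Transform" and isinstance(val, dict) \
                    and val.get("Name") == "AWS::Include":
                found.append(val.get("Parameters", {}).get("Location"))
            else:
                find_include_locations(val, found)
    elif isinstance(node, list):
        for item in node:
            find_include_locations(item, found)

template = {
    "Resources": {
        "Api": {
            "Properties": {
                "DefinitionBody": {
                    "Fn::Transform": {
                        "Name": "AWS::Include",
                        "Parameters": {"Location": "swagger.yaml"},
                    }
                }
            }
        }
    }
}

locations = []
find_include_locations(template, locations)
print(locations)  # ['swagger.yaml']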
awslabs/aws-sam-cli
samcli/commands/_utils/template.py
_resolve_relative_to
def _resolve_relative_to(path, original_root, new_root): """ If the given ``path`` is a relative path, then assume it is relative to ``original_root``. This method will update the path to be resolve it relative to ``new_root`` and return. Examples ------- # Assume a file called template.txt at location /tmp/original/root/template.txt expressed as relative path # We are trying to update it to be relative to /tmp/new/root instead of the /tmp/original/root >>> result = _resolve_relative_to("template.txt", \ "/tmp/original/root", \ "/tmp/new/root") >>> result ../../original/root/template.txt Returns ------- Updated path if the given path is a relative path. None, if the path is not a relative path. """ if not isinstance(path, six.string_types) \ or path.startswith("s3://") \ or os.path.isabs(path): # Value is definitely NOT a relative path. It is either a S3 URi or Absolute path or not a string at all return None # Value is definitely a relative path. Change it relative to the destination directory return os.path.relpath( os.path.normpath(os.path.join(original_root, path)), # Absolute original path w.r.t ``original_root`` new_root)
python
def _resolve_relative_to(path, original_root, new_root): """ If the given ``path`` is a relative path, then assume it is relative to ``original_root``. This method will update the path to be resolve it relative to ``new_root`` and return. Examples ------- # Assume a file called template.txt at location /tmp/original/root/template.txt expressed as relative path # We are trying to update it to be relative to /tmp/new/root instead of the /tmp/original/root >>> result = _resolve_relative_to("template.txt", \ "/tmp/original/root", \ "/tmp/new/root") >>> result ../../original/root/template.txt Returns ------- Updated path if the given path is a relative path. None, if the path is not a relative path. """ if not isinstance(path, six.string_types) \ or path.startswith("s3://") \ or os.path.isabs(path): # Value is definitely NOT a relative path. It is either a S3 URi or Absolute path or not a string at all return None # Value is definitely a relative path. Change it relative to the destination directory return os.path.relpath( os.path.normpath(os.path.join(original_root, path)), # Absolute original path w.r.t ``original_root`` new_root)
[ "def", "_resolve_relative_to", "(", "path", ",", "original_root", ",", "new_root", ")", ":", "if", "not", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", "or", "path", ".", "startswith", "(", "\"s3://\"", ")", "or", "os", ".", "path", ".", "isabs", "(", "path", ")", ":", "# Value is definitely NOT a relative path. It is either a S3 URi or Absolute path or not a string at all", "return", "None", "# Value is definitely a relative path. Change it relative to the destination directory", "return", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "original_root", ",", "path", ")", ")", ",", "# Absolute original path w.r.t ``original_root``", "new_root", ")" ]
If the given ``path`` is a relative path, then assume it is relative to ``original_root``. This method will update the path to resolve it relative to ``new_root`` and return it. Examples ------- # Assume a file called template.txt at location /tmp/original/root/template.txt expressed as a relative path # We are trying to update it to be relative to /tmp/new/root instead of /tmp/original/root >>> result = _resolve_relative_to("template.txt", \ "/tmp/original/root", \ "/tmp/new/root") >>> result ../../original/root/template.txt Returns ------- Updated path if the given path is a relative path. None, if the path is not a relative path.
[ "If", "the", "given", "path", "is", "a", "relative", "path", "then", "assume", "it", "is", "relative", "to", "original_root", ".", "This", "method", "will", "update", "the", "path", "to", "be", "resolve", "it", "relative", "to", "new_root", "and", "return", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/_utils/template.py#L208-L237
train
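The three early-return branches are easy to exercise directly. A sketch using plain str in place of six.string_types (i.e. assuming Python 3) and POSIX-style paths:

import os

def resolve(path, original_root, new_root):
    if not isinstance(path, str) or path.startswith("s3://") or os.path.isabs(path):
        return None  # S3 URI, absolute path, or not a string: leave untouched
    return os.path.relpath(os.path.normpath(os.path.join(original_root, path)),
                           new_root)

print(resolve("s3://bucket/swagger.yaml", "a", "b"))  # None
print(resolve("/abs/path", "a", "b"))                 # None
print(resolve({"Ref": "Param"}, "a", "b"))            # None
print(resolve("template.txt", "/tmp/original/root", "/tmp/new/root"))
# ../../original/root/template.txt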
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
parse_aws_include_transform
def parse_aws_include_transform(data): """ If the input data is an AWS::Include data, then parse and return the location of the included file. AWS::Include transform data usually has the following format: { "Fn::Transform": { "Name": "AWS::Include", "Parameters": { "Location": "s3://MyAmazonS3BucketName/swagger.yaml" } } } Parameters ---------- data : dict Dictionary data to parse Returns ------- str Location of the included file, if available. None, otherwise """ if not data: return if _FN_TRANSFORM not in data: return transform_data = data[_FN_TRANSFORM] name = transform_data.get("Name") location = transform_data.get("Parameters", {}).get("Location") if name == "AWS::Include": LOG.debug("Successfully parsed location from AWS::Include transform: %s", location) return location
python
def parse_aws_include_transform(data): """ If the input data is an AWS::Include data, then parse and return the location of the included file. AWS::Include transform data usually has the following format: { "Fn::Transform": { "Name": "AWS::Include", "Parameters": { "Location": "s3://MyAmazonS3BucketName/swagger.yaml" } } } Parameters ---------- data : dict Dictionary data to parse Returns ------- str Location of the included file, if available. None, otherwise """ if not data: return if _FN_TRANSFORM not in data: return transform_data = data[_FN_TRANSFORM] name = transform_data.get("Name") location = transform_data.get("Parameters", {}).get("Location") if name == "AWS::Include": LOG.debug("Successfully parsed location from AWS::Include transform: %s", location) return location
[ "def", "parse_aws_include_transform", "(", "data", ")", ":", "if", "not", "data", ":", "return", "if", "_FN_TRANSFORM", "not", "in", "data", ":", "return", "transform_data", "=", "data", "[", "_FN_TRANSFORM", "]", "name", "=", "transform_data", ".", "get", "(", "\"Name\"", ")", "location", "=", "transform_data", ".", "get", "(", "\"Parameters\"", ",", "{", "}", ")", ".", "get", "(", "\"Location\"", ")", "if", "name", "==", "\"AWS::Include\"", ":", "LOG", ".", "debug", "(", "\"Successfully parsed location from AWS::Include transform: %s\"", ",", "location", ")", "return", "location" ]
If the input data is an AWS::Include data, then parse and return the location of the included file. AWS::Include transform data usually has the following format: { "Fn::Transform": { "Name": "AWS::Include", "Parameters": { "Location": "s3://MyAmazonS3BucketName/swagger.yaml" } } } Parameters ---------- data : dict Dictionary data to parse Returns ------- str Location of the included file, if available. None, otherwise
[ "If", "the", "input", "data", "is", "an", "AWS", "::", "Include", "data", "then", "parse", "and", "return", "the", "location", "of", "the", "included", "file", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L20-L57
train
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader.read
def read(self): """ Gets the Swagger document from either of the given locations. If we fail to retrieve or parse the Swagger file, this method will return None. Returns ------- dict: Swagger document. None, if we cannot retrieve the document """ swagger = None # First check if there is inline swagger if self.definition_body: swagger = self._read_from_definition_body() if not swagger and self.definition_uri: # If not, then try to download it from the given URI swagger = self._download_swagger(self.definition_uri) return swagger
python
def read(self): """ Gets the Swagger document from either of the given locations. If we fail to retrieve or parse the Swagger file, this method will return None. Returns ------- dict: Swagger document. None, if we cannot retrieve the document """ swagger = None # First check if there is inline swagger if self.definition_body: swagger = self._read_from_definition_body() if not swagger and self.definition_uri: # If not, then try to download it from the given URI swagger = self._download_swagger(self.definition_uri) return swagger
[ "def", "read", "(", "self", ")", ":", "swagger", "=", "None", "# First check if there is inline swagger", "if", "self", ".", "definition_body", ":", "swagger", "=", "self", ".", "_read_from_definition_body", "(", ")", "if", "not", "swagger", "and", "self", ".", "definition_uri", ":", "# If not, then try to download it from the given URI", "swagger", "=", "self", ".", "_download_swagger", "(", "self", ".", "definition_uri", ")", "return", "swagger" ]
Gets the Swagger document from either of the given locations. If we fail to retrieve or parse the Swagger file, this method will return None. Returns ------- dict: Swagger document. None, if we cannot retrieve the document
[ "Gets", "the", "Swagger", "document", "from", "either", "of", "the", "given", "locations", ".", "If", "we", "fail", "to", "retrieve", "or", "parse", "the", "Swagger", "file", "this", "method", "will", "return", "None", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L92-L113
train
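The precedence here is simply "inline body wins, URI is the fallback". A toy sketch with a hypothetical download callable in place of the real S3/local retrieval:

def read(definition_body, definition_uri, download):
    swagger = None
    if definition_body:
        swagger = definition_body           # inline swagger (or AWS::Include)
    if not swagger and definition_uri:
        swagger = download(definition_uri)  # only consulted as a fallback
    return swagger

fake_download = lambda uri: {"downloaded_from": uri}
print(read({"paths": {}}, "s3://bucket/swagger.yaml", fake_download))
# {'paths': {}}
print(read(None, "s3://bucket/swagger.yaml", fake_download))
# {'downloaded_from': 's3://bucket/swagger.yaml'}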
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader._read_from_definition_body
def _read_from_definition_body(self): """ Read the Swagger document from DefinitionBody. It could either be an inline Swagger dictionary or an AWS::Include macro that contains location of the included Swagger. In the later case, we will download and parse the Swagger document. Returns ------- dict Swagger document, if we were able to parse. None, otherwise """ # Let's try to parse it as AWS::Include Transform first. If not, then fall back to assuming the Swagger document # was inclined directly into the body location = parse_aws_include_transform(self.definition_body) if location: LOG.debug("Trying to download Swagger from %s", location) return self._download_swagger(location) # Inline Swagger, just return the contents which should already be a dictionary LOG.debug("Detected Inline Swagger definition") return self.definition_body
python
def _read_from_definition_body(self): """ Read the Swagger document from DefinitionBody. It could either be an inline Swagger dictionary or an AWS::Include macro that contains location of the included Swagger. In the later case, we will download and parse the Swagger document. Returns ------- dict Swagger document, if we were able to parse. None, otherwise """ # Let's try to parse it as AWS::Include Transform first. If not, then fall back to assuming the Swagger document # was inclined directly into the body location = parse_aws_include_transform(self.definition_body) if location: LOG.debug("Trying to download Swagger from %s", location) return self._download_swagger(location) # Inline Swagger, just return the contents which should already be a dictionary LOG.debug("Detected Inline Swagger definition") return self.definition_body
[ "def", "_read_from_definition_body", "(", "self", ")", ":", "# Let's try to parse it as AWS::Include Transform first. If not, then fall back to assuming the Swagger document", "# was inclined directly into the body", "location", "=", "parse_aws_include_transform", "(", "self", ".", "definition_body", ")", "if", "location", ":", "LOG", ".", "debug", "(", "\"Trying to download Swagger from %s\"", ",", "location", ")", "return", "self", ".", "_download_swagger", "(", "location", ")", "# Inline Swagger, just return the contents which should already be a dictionary", "LOG", ".", "debug", "(", "\"Detected Inline Swagger definition\"", ")", "return", "self", ".", "definition_body" ]
Read the Swagger document from DefinitionBody. It could either be an inline Swagger dictionary or an
AWS::Include macro that contains the location of the included Swagger. In the latter case, we will download
and parse the Swagger document.

Returns
-------
dict
    Swagger document, if we were able to parse. None, otherwise
[ "Read", "the", "Swagger", "document", "from", "DefinitionBody", ".", "It", "could", "either", "be", "an", "inline", "Swagger", "dictionary", "or", "an", "AWS", "::", "Include", "macro", "that", "contains", "location", "of", "the", "included", "Swagger", ".", "In", "the", "later", "case", "we", "will", "download", "and", "parse", "the", "Swagger", "document", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L115-L136
train
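For reference, a DefinitionBody that carries the macro rather than inline Swagger typically looks like the sketch below. The structure follows AWS's documented Fn::Transform syntax for AWS::Include; the bucket path is made up, and parse_aws_include_transform, whose implementation is not shown in this record, is what extracts the Location:

definition_body = {
    "Fn::Transform": {
        "Name": "AWS::Include",
        "Parameters": {
            # Location of the Swagger file to pull in; value is illustrative
            "Location": "s3://my-bucket/swagger.yaml"
        }
    }
}
# Anything without this transform is treated as inline Swagger and returned as-is.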
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader._download_swagger
def _download_swagger(self, location):
        """
        Download the file from the given local or remote location and return it

        Parameters
        ----------
        location : str or dict
            Local path or S3 path to Swagger file to download. Consult the ``__init__.py`` documentation for
            specifics on the structure of this property.

        Returns
        -------
        dict or None
            Downloaded and parsed Swagger document. None, if unable to download
        """

        if not location:
            return

        bucket, key, version = self._parse_s3_location(location)
        if bucket and key:
            LOG.debug("Downloading Swagger document from Bucket=%s, Key=%s, Version=%s", bucket, key, version)
            swagger_str = self._download_from_s3(bucket, key, version)
            return yaml_parse(swagger_str)

        if not isinstance(location, string_types):
            # This is not a string and not a S3 Location dictionary. Probably something invalid
            LOG.debug("Unable to download Swagger file. Invalid location: %s", location)
            return

        # ``location`` is a string and not a S3 path. It is probably a local path. Let's resolve relative path if any
        filepath = location
        if self.working_dir:
            # Resolve relative paths, if any, with respect to working directory
            filepath = os.path.join(self.working_dir, location)

        if not os.path.exists(filepath):
            LOG.debug("Unable to download Swagger file. File not found at location %s", filepath)
            return

        LOG.debug("Reading Swagger document from local file at %s", filepath)
        with open(filepath, "r") as fp:
            return yaml_parse(fp.read())
python
def _download_swagger(self, location):
        """
        Download the file from the given local or remote location and return it

        Parameters
        ----------
        location : str or dict
            Local path or S3 path to Swagger file to download. Consult the ``__init__.py`` documentation for
            specifics on the structure of this property.

        Returns
        -------
        dict or None
            Downloaded and parsed Swagger document. None, if unable to download
        """

        if not location:
            return

        bucket, key, version = self._parse_s3_location(location)
        if bucket and key:
            LOG.debug("Downloading Swagger document from Bucket=%s, Key=%s, Version=%s", bucket, key, version)
            swagger_str = self._download_from_s3(bucket, key, version)
            return yaml_parse(swagger_str)

        if not isinstance(location, string_types):
            # This is not a string and not a S3 Location dictionary. Probably something invalid
            LOG.debug("Unable to download Swagger file. Invalid location: %s", location)
            return

        # ``location`` is a string and not a S3 path. It is probably a local path. Let's resolve relative path if any
        filepath = location
        if self.working_dir:
            # Resolve relative paths, if any, with respect to working directory
            filepath = os.path.join(self.working_dir, location)

        if not os.path.exists(filepath):
            LOG.debug("Unable to download Swagger file. File not found at location %s", filepath)
            return

        LOG.debug("Reading Swagger document from local file at %s", filepath)
        with open(filepath, "r") as fp:
            return yaml_parse(fp.read())
[ "def", "_download_swagger", "(", "self", ",", "location", ")", ":", "if", "not", "location", ":", "return", "bucket", ",", "key", ",", "version", "=", "self", ".", "_parse_s3_location", "(", "location", ")", "if", "bucket", "and", "key", ":", "LOG", ".", "debug", "(", "\"Downloading Swagger document from Bucket=%s, Key=%s, Version=%s\"", ",", "bucket", ",", "key", ",", "version", ")", "swagger_str", "=", "self", ".", "_download_from_s3", "(", "bucket", ",", "key", ",", "version", ")", "return", "yaml_parse", "(", "swagger_str", ")", "if", "not", "isinstance", "(", "location", ",", "string_types", ")", ":", "# This is not a string and not a S3 Location dictionary. Probably something invalid", "LOG", ".", "debug", "(", "\"Unable to download Swagger file. Invalid location: %s\"", ",", "location", ")", "return", "# ``location`` is a string and not a S3 path. It is probably a local path. Let's resolve relative path if any", "filepath", "=", "location", "if", "self", ".", "working_dir", ":", "# Resolve relative paths, if any, with respect to working directory", "filepath", "=", "os", ".", "path", ".", "join", "(", "self", ".", "working_dir", ",", "location", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "filepath", ")", ":", "LOG", ".", "debug", "(", "\"Unable to download Swagger file. File not found at location %s\"", ",", "filepath", ")", "return", "LOG", ".", "debug", "(", "\"Reading Swagger document from local file at %s\"", ",", "filepath", ")", "with", "open", "(", "filepath", ",", "\"r\"", ")", "as", "fp", ":", "return", "yaml_parse", "(", "fp", ".", "read", "(", ")", ")" ]
Download the file from the given local or remote location and return it

Parameters
----------
location : str or dict
    Local path or S3 path to Swagger file to download. Consult the ``__init__.py`` documentation for
    specifics on the structure of this property.

Returns
-------
dict or None
    Downloaded and parsed Swagger document. None, if unable to download
[ "Download", "the", "file", "from", "given", "local", "or", "remote", "location", "and", "return", "it" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L138-L180
train
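One detail of the relative-path resolution above is worth noting: os.path.join discards the working directory when the location is already absolute, so absolute local paths pass through unchanged. A minimal demonstration (paths are illustrative):

import os

print(os.path.join("/work/dir", "swagger.yaml"))       # /work/dir/swagger.yaml
print(os.path.join("/work/dir", "/abs/swagger.yaml"))  # /abs/swagger.yaml (absolute path wins)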
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader._download_from_s3
def _download_from_s3(bucket, key, version=None):
        """
        Download a file from the given S3 location, if available.

        Parameters
        ----------
        bucket : str
            S3 Bucket name
        key : str
            S3 Bucket Key aka file path
        version : str
            Optional Version ID of the file

        Returns
        -------
        str
            Contents of the file that was downloaded

        Raises
        ------
        botocore.exceptions.ClientError if we were unable to download the file from S3
        """
        s3 = boto3.client('s3')

        extra_args = {}
        if version:
            extra_args["VersionId"] = version

        with tempfile.TemporaryFile() as fp:
            try:
                s3.download_fileobj(
                    bucket, key, fp,
                    ExtraArgs=extra_args)

                # go to start of file
                fp.seek(0)

                # Read and return all the contents
                return fp.read()

            except botocore.exceptions.ClientError:
                LOG.error("Unable to download Swagger document from S3 Bucket=%s Key=%s Version=%s",
                          bucket, key, version)
                raise
python
def _download_from_s3(bucket, key, version=None):
        """
        Download a file from the given S3 location, if available.

        Parameters
        ----------
        bucket : str
            S3 Bucket name
        key : str
            S3 Bucket Key aka file path
        version : str
            Optional Version ID of the file

        Returns
        -------
        str
            Contents of the file that was downloaded

        Raises
        ------
        botocore.exceptions.ClientError if we were unable to download the file from S3
        """
        s3 = boto3.client('s3')

        extra_args = {}
        if version:
            extra_args["VersionId"] = version

        with tempfile.TemporaryFile() as fp:
            try:
                s3.download_fileobj(
                    bucket, key, fp,
                    ExtraArgs=extra_args)

                # go to start of file
                fp.seek(0)

                # Read and return all the contents
                return fp.read()

            except botocore.exceptions.ClientError:
                LOG.error("Unable to download Swagger document from S3 Bucket=%s Key=%s Version=%s",
                          bucket, key, version)
                raise
[ "def", "_download_from_s3", "(", "bucket", ",", "key", ",", "version", "=", "None", ")", ":", "s3", "=", "boto3", ".", "client", "(", "'s3'", ")", "extra_args", "=", "{", "}", "if", "version", ":", "extra_args", "[", "\"VersionId\"", "]", "=", "version", "with", "tempfile", ".", "TemporaryFile", "(", ")", "as", "fp", ":", "try", ":", "s3", ".", "download_fileobj", "(", "bucket", ",", "key", ",", "fp", ",", "ExtraArgs", "=", "extra_args", ")", "# go to start of file", "fp", ".", "seek", "(", "0", ")", "# Read and return all the contents", "return", "fp", ".", "read", "(", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", ":", "LOG", ".", "error", "(", "\"Unable to download Swagger document from S3 Bucket=%s Key=%s Version=%s\"", ",", "bucket", ",", "key", ",", "version", ")", "raise" ]
Download a file from the given S3 location, if available.

Parameters
----------
bucket : str
    S3 Bucket name
key : str
    S3 Bucket Key aka file path
version : str
    Optional Version ID of the file

Returns
-------
str
    Contents of the file that was downloaded

Raises
------
botocore.exceptions.ClientError if we were unable to download the file from S3
[ "Download", "a", "file", "from", "given", "S3", "location", "if", "available", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L183-L229
train
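Since the method re-raises botocore's ClientError after logging, a caller that wants a soft failure has to catch it. A sketch, assuming the class and method names above (the bucket and key values are made up):

import botocore.exceptions

try:
    contents = SamSwaggerReader._download_from_s3("my-bucket", "swagger.yaml")
except botocore.exceptions.ClientError:
    contents = None  # e.g. the object is missing or credentials lack s3:GetObject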
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/reader.py
SamSwaggerReader._parse_s3_location
def _parse_s3_location(location):
        """
        Parses the given location input as an S3 Location and returns the file's bucket, key and version as
        separate values. Input can be in two different formats:

        1. Dictionary with ``Bucket``, ``Key``, ``Version`` keys
        2. String of S3 URI in format ``s3://<bucket>/<key>?versionId=<version>``

        If the input is not in either of the above formats, this method will return (None, None, None) tuple for all
        the values.

        Parameters
        ----------
        location : str or dict
            Location of the S3 file

        Returns
        -------
        str
            Name of the S3 Bucket. None, if bucket value was not found
        str
            Key of the file from S3. None, if key was not provided
        str
            Optional Version ID of the file. None, if version ID is not provided
        """
        bucket, key, version = None, None, None

        if isinstance(location, dict):
            # This is a S3 Location dictionary. Just grab the fields. It is very well possible that
            # this dictionary has none of the fields we expect. Return None if the fields don't exist.
            bucket, key, version = (
                location.get("Bucket"),
                location.get("Key"),
                location.get("Version")
            )

        elif isinstance(location, string_types) and location.startswith("s3://"):
            # This is a S3 URI. Parse it using a standard URI parser to extract the components
            parsed = urlparse(location)
            query = parse_qs(parsed.query)

            bucket = parsed.netloc
            key = parsed.path.lstrip('/')  # Leading '/' messes with S3 APIs. Remove it.

            # If there is a query string that has a single versionId field,
            # set the object version and return
            if query and 'versionId' in query and len(query['versionId']) == 1:
                version = query['versionId'][0]

        return bucket, key, version
python
def _parse_s3_location(location):
        """
        Parses the given location input as an S3 Location and returns the file's bucket, key and version as
        separate values. Input can be in two different formats:

        1. Dictionary with ``Bucket``, ``Key``, ``Version`` keys
        2. String of S3 URI in format ``s3://<bucket>/<key>?versionId=<version>``

        If the input is not in either of the above formats, this method will return (None, None, None) tuple for all
        the values.

        Parameters
        ----------
        location : str or dict
            Location of the S3 file

        Returns
        -------
        str
            Name of the S3 Bucket. None, if bucket value was not found
        str
            Key of the file from S3. None, if key was not provided
        str
            Optional Version ID of the file. None, if version ID is not provided
        """
        bucket, key, version = None, None, None

        if isinstance(location, dict):
            # This is a S3 Location dictionary. Just grab the fields. It is very well possible that
            # this dictionary has none of the fields we expect. Return None if the fields don't exist.
            bucket, key, version = (
                location.get("Bucket"),
                location.get("Key"),
                location.get("Version")
            )

        elif isinstance(location, string_types) and location.startswith("s3://"):
            # This is a S3 URI. Parse it using a standard URI parser to extract the components
            parsed = urlparse(location)
            query = parse_qs(parsed.query)

            bucket = parsed.netloc
            key = parsed.path.lstrip('/')  # Leading '/' messes with S3 APIs. Remove it.

            # If there is a query string that has a single versionId field,
            # set the object version and return
            if query and 'versionId' in query and len(query['versionId']) == 1:
                version = query['versionId'][0]

        return bucket, key, version
[ "def", "_parse_s3_location", "(", "location", ")", ":", "bucket", ",", "key", ",", "version", "=", "None", ",", "None", ",", "None", "if", "isinstance", "(", "location", ",", "dict", ")", ":", "# This is a S3 Location dictionary. Just grab the fields. It is very well possible that", "# this dictionary has none of the fields we expect. Return None if the fields don't exist.", "bucket", ",", "key", ",", "version", "=", "(", "location", ".", "get", "(", "\"Bucket\"", ")", ",", "location", ".", "get", "(", "\"Key\"", ")", ",", "location", ".", "get", "(", "\"Version\"", ")", ")", "elif", "isinstance", "(", "location", ",", "string_types", ")", "and", "location", ".", "startswith", "(", "\"s3://\"", ")", ":", "# This is a S3 URI. Parse it using a standard URI parser to extract the components", "parsed", "=", "urlparse", "(", "location", ")", "query", "=", "parse_qs", "(", "parsed", ".", "query", ")", "bucket", "=", "parsed", ".", "netloc", "key", "=", "parsed", ".", "path", ".", "lstrip", "(", "'/'", ")", "# Leading '/' messes with S3 APIs. Remove it.", "# If there is a query string that has a single versionId field,", "# set the object version and return", "if", "query", "and", "'versionId'", "in", "query", "and", "len", "(", "query", "[", "'versionId'", "]", ")", "==", "1", ":", "version", "=", "query", "[", "'versionId'", "]", "[", "0", "]", "return", "bucket", ",", "key", ",", "version" ]
Parses the given location input as an S3 Location and returns the file's bucket, key and version as
separate values. Input can be in two different formats:

1. Dictionary with ``Bucket``, ``Key``, ``Version`` keys
2. String of S3 URI in format ``s3://<bucket>/<key>?versionId=<version>``

If the input is not in either of the above formats, this method will return (None, None, None) tuple for all
the values.

Parameters
----------
location : str or dict
    Location of the S3 file

Returns
-------
str
    Name of the S3 Bucket. None, if bucket value was not found
str
    Key of the file from S3. None, if key was not provided
str
    Optional Version ID of the file. None, if version ID is not provided
[ "Parses", "the", "given", "location", "input", "as", "a", "S3", "Location", "and", "returns", "the", "file", "s", "bucket", "key", "and", "version", "as", "separate", "values", ".", "Input", "can", "be", "in", "two", "different", "formats", ":" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/reader.py#L232-L282
train
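The URI branch above relies only on standard URL parsing, which can be exercised on its own. A self-contained sketch using the Python 3 stdlib equivalents of the six imports (the URI is made up):

from urllib.parse import urlparse, parse_qs

parsed = urlparse("s3://my-bucket/path/to/swagger.yaml?versionId=abc123")
query = parse_qs(parsed.query)

print(parsed.netloc)            # my-bucket
print(parsed.path.lstrip("/"))  # path/to/swagger.yaml
print(query["versionId"][0])    # abc123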
awslabs/aws-sam-cli
samcli/cli/context.py
Context.debug
def debug(self, value): """ Turn on debug logging if necessary. :param value: Value of debug flag """ self._debug = value if self._debug: # Turn on debug logging logging.getLogger().setLevel(logging.DEBUG)
python
def debug(self, value): """ Turn on debug logging if necessary. :param value: Value of debug flag """ self._debug = value if self._debug: # Turn on debug logging logging.getLogger().setLevel(logging.DEBUG)
[ "def", "debug", "(", "self", ",", "value", ")", ":", "self", ".", "_debug", "=", "value", "if", "self", ".", "_debug", ":", "# Turn on debug logging", "logging", ".", "getLogger", "(", ")", ".", "setLevel", "(", "logging", ".", "DEBUG", ")" ]
Turn on debug logging if necessary. :param value: Value of debug flag
[ "Turn", "on", "debug", "logging", "if", "necessary", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/context.py#L35-L45
train
awslabs/aws-sam-cli
samcli/cli/context.py
Context._refresh_session
def _refresh_session(self):
        """
        Update boto3's default session by creating a new session based on values set in the context. Some properties
        of the Boto3's session object are read-only. Therefore when Click parses new AWS session related properties
        (like region & profile), it will call this method to create a new session with the latest values for these
        properties.
        """
        boto3.setup_default_session(region_name=self._aws_region,
                                    profile_name=self._aws_profile)
python
def _refresh_session(self):
        """
        Update boto3's default session by creating a new session based on values set in the context. Some properties
        of the Boto3's session object are read-only. Therefore when Click parses new AWS session related properties
        (like region & profile), it will call this method to create a new session with the latest values for these
        properties.
        """
        boto3.setup_default_session(region_name=self._aws_region,
                                    profile_name=self._aws_profile)
[ "def", "_refresh_session", "(", "self", ")", ":", "boto3", ".", "setup_default_session", "(", "region_name", "=", "self", ".", "_aws_region", ",", "profile_name", "=", "self", ".", "_aws_profile", ")" ]
Update boto3's default session by creating a new session based on values set in the context. Some properties
of the Boto3's session object are read-only. Therefore when Click parses new AWS session related properties
(like region & profile), it will call this method to create a new session with the latest values for these
properties.
[ "Update", "boto3", "s", "default", "session", "by", "creating", "a", "new", "session", "based", "on", "values", "set", "in", "the", "context", ".", "Some", "properties", "of", "the", "Boto3", "s", "session", "object", "are", "read", "-", "only", ".", "Therefore", "when", "Click", "parses", "new", "AWS", "session", "related", "properties", "(", "like", "region", "&", "profile", ")", "it", "will", "call", "this", "method", "to", "create", "a", "new", "session", "with", "latest", "values", "for", "these", "properties", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/context.py#L71-L78
train
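boto3.setup_default_session replaces the module-level default session, so clients created afterwards pick up the new values. A brief sketch (the region and profile names are illustrative):

import boto3

boto3.setup_default_session(region_name="us-east-1", profile_name="dev")
s3 = boto3.client("s3")  # built from the refreshed default session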
awslabs/aws-sam-cli
samcli/local/init/__init__.py
generate_project
def generate_project(
        location=None, runtime="nodejs", dependency_manager=None,
        output_dir=".", name='sam-sample-app', no_input=False):
    """Generates a project using cookiecutter and the options given

    Generate project scaffolds a project using default templates if the user doesn't provide one via the location
    parameter. Default templates are automatically chosen depending on the runtime given by the user.

    Parameters
    ----------
    location: Path, optional
        Git, HTTP, Local path or Zip containing cookiecutter template
        (the default is None, which means no custom template)

    runtime: str, optional
        Lambda Runtime (the default is "nodejs", which creates a nodejs project)

    dependency_manager: str, optional
        Dependency Manager for the Lambda Runtime Project (the default is "npm" for a "nodejs" Lambda runtime)

    output_dir: str, optional
        Output directory where project should be generated
        (the default is ".", which implies the current folder)

    name: str, optional
        Name of the project
        (the default is "sam-sample-app", which implies a project named sam-sample-app will be created)

    no_input : bool, optional
        Whether to prompt for input or to accept default values
        (the default is False, which prompts the user for values it doesn't know for baking)

    Raises
    ------
    GenerateProjectFailedError
        If the process of baking a project fails
    """

    template = None

    for mapping in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values()))):
        if runtime in mapping['runtimes'] or any([r.startswith(runtime) for r in mapping['runtimes']]):
            if not dependency_manager:
                template = mapping['init_location']
                break
            elif dependency_manager == mapping['dependency_manager']:
                template = mapping['init_location']

    if not template:
        msg = "Lambda Runtime {} does not support dependency manager: {}".format(runtime, dependency_manager)
        raise GenerateProjectFailedError(project=name, provider_error=msg)

    params = {
        "template": location if location else template,
        "output_dir": output_dir,
        "no_input": no_input
    }

    LOG.debug("Parameters dict created with input given")
    LOG.debug("%s", params)

    if not location and name is not None:
        params['extra_context'] = {'project_name': name, 'runtime': runtime}
        params['no_input'] = True
        LOG.debug("Parameters dict updated with project name as extra_context")
        LOG.debug("%s", params)

    try:
        LOG.debug("Baking a new template with cookiecutter with all parameters")
        cookiecutter(**params)
    except CookiecutterException as e:
        raise GenerateProjectFailedError(project=name, provider_error=e)
python
def generate_project(
        location=None, runtime="nodejs", dependency_manager=None,
        output_dir=".", name='sam-sample-app', no_input=False):
    """Generates a project using cookiecutter and the options given

    Generate project scaffolds a project using default templates if the user doesn't provide one via the location
    parameter. Default templates are automatically chosen depending on the runtime given by the user.

    Parameters
    ----------
    location: Path, optional
        Git, HTTP, Local path or Zip containing cookiecutter template
        (the default is None, which means no custom template)

    runtime: str, optional
        Lambda Runtime (the default is "nodejs", which creates a nodejs project)

    dependency_manager: str, optional
        Dependency Manager for the Lambda Runtime Project (the default is "npm" for a "nodejs" Lambda runtime)

    output_dir: str, optional
        Output directory where project should be generated
        (the default is ".", which implies the current folder)

    name: str, optional
        Name of the project
        (the default is "sam-sample-app", which implies a project named sam-sample-app will be created)

    no_input : bool, optional
        Whether to prompt for input or to accept default values
        (the default is False, which prompts the user for values it doesn't know for baking)

    Raises
    ------
    GenerateProjectFailedError
        If the process of baking a project fails
    """

    template = None

    for mapping in list(itertools.chain(*(RUNTIME_DEP_TEMPLATE_MAPPING.values()))):
        if runtime in mapping['runtimes'] or any([r.startswith(runtime) for r in mapping['runtimes']]):
            if not dependency_manager:
                template = mapping['init_location']
                break
            elif dependency_manager == mapping['dependency_manager']:
                template = mapping['init_location']

    if not template:
        msg = "Lambda Runtime {} does not support dependency manager: {}".format(runtime, dependency_manager)
        raise GenerateProjectFailedError(project=name, provider_error=msg)

    params = {
        "template": location if location else template,
        "output_dir": output_dir,
        "no_input": no_input
    }

    LOG.debug("Parameters dict created with input given")
    LOG.debug("%s", params)

    if not location and name is not None:
        params['extra_context'] = {'project_name': name, 'runtime': runtime}
        params['no_input'] = True
        LOG.debug("Parameters dict updated with project name as extra_context")
        LOG.debug("%s", params)

    try:
        LOG.debug("Baking a new template with cookiecutter with all parameters")
        cookiecutter(**params)
    except CookiecutterException as e:
        raise GenerateProjectFailedError(project=name, provider_error=e)
[ "def", "generate_project", "(", "location", "=", "None", ",", "runtime", "=", "\"nodejs\"", ",", "dependency_manager", "=", "None", ",", "output_dir", "=", "\".\"", ",", "name", "=", "'sam-sample-app'", ",", "no_input", "=", "False", ")", ":", "template", "=", "None", "for", "mapping", "in", "list", "(", "itertools", ".", "chain", "(", "*", "(", "RUNTIME_DEP_TEMPLATE_MAPPING", ".", "values", "(", ")", ")", ")", ")", ":", "if", "runtime", "in", "mapping", "[", "'runtimes'", "]", "or", "any", "(", "[", "r", ".", "startswith", "(", "runtime", ")", "for", "r", "in", "mapping", "[", "'runtimes'", "]", "]", ")", ":", "if", "not", "dependency_manager", ":", "template", "=", "mapping", "[", "'init_location'", "]", "break", "elif", "dependency_manager", "==", "mapping", "[", "'dependency_manager'", "]", ":", "template", "=", "mapping", "[", "'init_location'", "]", "if", "not", "template", ":", "msg", "=", "\"Lambda Runtime {} does not support dependency manager: {}\"", ".", "format", "(", "runtime", ",", "dependency_manager", ")", "raise", "GenerateProjectFailedError", "(", "project", "=", "name", ",", "provider_error", "=", "msg", ")", "params", "=", "{", "\"template\"", ":", "location", "if", "location", "else", "template", ",", "\"output_dir\"", ":", "output_dir", ",", "\"no_input\"", ":", "no_input", "}", "LOG", ".", "debug", "(", "\"Parameters dict created with input given\"", ")", "LOG", ".", "debug", "(", "\"%s\"", ",", "params", ")", "if", "not", "location", "and", "name", "is", "not", "None", ":", "params", "[", "'extra_context'", "]", "=", "{", "'project_name'", ":", "name", ",", "'runtime'", ":", "runtime", "}", "params", "[", "'no_input'", "]", "=", "True", "LOG", ".", "debug", "(", "\"Parameters dict updated with project name as extra_context\"", ")", "LOG", ".", "debug", "(", "\"%s\"", ",", "params", ")", "try", ":", "LOG", ".", "debug", "(", "\"Baking a new template with cookiecutter with all parameters\"", ")", "cookiecutter", "(", "*", "*", "params", ")", "except", "CookiecutterException", "as", "e", ":", "raise", "GenerateProjectFailedError", "(", "project", "=", "name", ",", "provider_error", "=", "e", ")" ]
Generates a project using cookiecutter and the options given

Generate project scaffolds a project using default templates if the user doesn't provide one via the location
parameter. Default templates are automatically chosen depending on the runtime given by the user.

Parameters
----------
location: Path, optional
    Git, HTTP, Local path or Zip containing cookiecutter template
    (the default is None, which means no custom template)

runtime: str, optional
    Lambda Runtime (the default is "nodejs", which creates a nodejs project)

dependency_manager: str, optional
    Dependency Manager for the Lambda Runtime Project (the default is "npm" for a "nodejs" Lambda runtime)

output_dir: str, optional
    Output directory where project should be generated
    (the default is ".", which implies the current folder)

name: str, optional
    Name of the project
    (the default is "sam-sample-app", which implies a project named sam-sample-app will be created)

no_input : bool, optional
    Whether to prompt for input or to accept default values
    (the default is False, which prompts the user for values it doesn't know for baking)

Raises
------
GenerateProjectFailedError
    If the process of baking a project fails
[ "Generates", "project", "using", "cookiecutter", "and", "options", "given" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/init/__init__.py#L16-L83
train
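The params dict above maps directly onto cookiecutter's keyword arguments. A sketch of the equivalent direct call, assuming a hypothetical template location:

from cookiecutter.main import cookiecutter

cookiecutter(
    template="gh:example/cookiecutter-aws-sam-python",  # hypothetical template repo
    output_dir=".",
    no_input=True,  # suppress prompts, as done above when a project name is supplied
    extra_context={"project_name": "sam-sample-app", "runtime": "python3.6"},
)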
awslabs/aws-sam-cli
samcli/lib/utils/time.py
to_utc
def to_utc(some_time): """ Convert the given date to UTC, if the date contains a timezone. Parameters ---------- some_time : datetime.datetime datetime object to convert to UTC Returns ------- datetime.datetime Converted datetime object """ # Convert timezone aware objects to UTC if some_time.tzinfo and some_time.utcoffset(): some_time = some_time.astimezone(tzutc()) # Now that time is UTC, simply remove the timezone component. return some_time.replace(tzinfo=None)
python
def to_utc(some_time): """ Convert the given date to UTC, if the date contains a timezone. Parameters ---------- some_time : datetime.datetime datetime object to convert to UTC Returns ------- datetime.datetime Converted datetime object """ # Convert timezone aware objects to UTC if some_time.tzinfo and some_time.utcoffset(): some_time = some_time.astimezone(tzutc()) # Now that time is UTC, simply remove the timezone component. return some_time.replace(tzinfo=None)
[ "def", "to_utc", "(", "some_time", ")", ":", "# Convert timezone aware objects to UTC", "if", "some_time", ".", "tzinfo", "and", "some_time", ".", "utcoffset", "(", ")", ":", "some_time", "=", "some_time", ".", "astimezone", "(", "tzutc", "(", ")", ")", "# Now that time is UTC, simply remove the timezone component.", "return", "some_time", ".", "replace", "(", "tzinfo", "=", "None", ")" ]
Convert the given date to UTC, if the date contains a timezone. Parameters ---------- some_time : datetime.datetime datetime object to convert to UTC Returns ------- datetime.datetime Converted datetime object
[ "Convert", "the", "given", "date", "to", "UTC", "if", "the", "date", "contains", "a", "timezone", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/time.py#L68-L88
train
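A worked example of the conversion above, using dateutil's tzoffset to build a timezone-aware datetime (the values are illustrative):

import datetime
from dateutil.tz import tzoffset, tzutc

aware = datetime.datetime(2019, 1, 1, 10, 0, tzinfo=tzoffset("PST", -8 * 3600))
utc_naive = aware.astimezone(tzutc()).replace(tzinfo=None)
print(utc_naive)  # 2019-01-01 18:00:00 -- UTC wall-clock time with the timezone stripped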
awslabs/aws-sam-cli
samcli/lib/utils/time.py
parse_date
def parse_date(date_string):
    """
    Parse the given string as a datetime object. This parser supports almost any string format.

    For relative times, like `10min ago`, this parser computes the actual time relative to current UTC time. This
    allows time to always be in UTC if an explicit time zone is not provided.

    Parameters
    ----------
    date_string : str
        String representing the date

    Returns
    -------
    datetime.datetime
        Parsed datetime object. None, if the string cannot be parsed.
    """
    parser_settings = {
        # Relative times like '10m ago' must subtract from the current UTC time. Without this setting, dateparser
        # will use current local time as the base for subtraction, but falsely assume it is a UTC time. Therefore
        # the time that dateparser returns will be a `datetime` object that did not have any timezone information.
        # So be explicit to set the time to UTC.
        "RELATIVE_BASE": datetime.datetime.utcnow()
    }

    return dateparser.parse(date_string, settings=parser_settings)
python
def parse_date(date_string):
    """
    Parse the given string as a datetime object. This parser supports almost any string format.

    For relative times, like `10min ago`, this parser computes the actual time relative to current UTC time. This
    allows time to always be in UTC if an explicit time zone is not provided.

    Parameters
    ----------
    date_string : str
        String representing the date

    Returns
    -------
    datetime.datetime
        Parsed datetime object. None, if the string cannot be parsed.
    """
    parser_settings = {
        # Relative times like '10m ago' must subtract from the current UTC time. Without this setting, dateparser
        # will use current local time as the base for subtraction, but falsely assume it is a UTC time. Therefore
        # the time that dateparser returns will be a `datetime` object that did not have any timezone information.
        # So be explicit to set the time to UTC.
        "RELATIVE_BASE": datetime.datetime.utcnow()
    }

    return dateparser.parse(date_string, settings=parser_settings)
[ "def", "parse_date", "(", "date_string", ")", ":", "parser_settings", "=", "{", "# Relative times like '10m ago' must subtract from the current UTC time. Without this setting, dateparser", "# will use current local time as the base for subtraction, but falsely assume it is a UTC time. Therefore", "# the time that dateparser returns will be a `datetime` object that did not have any timezone information.", "# So be explicit to set the time to UTC.", "\"RELATIVE_BASE\"", ":", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "}", "return", "dateparser", ".", "parse", "(", "date_string", ",", "settings", "=", "parser_settings", ")" ]
Parse the given string as a datetime object. This parser supports almost any string format.

For relative times, like `10min ago`, this parser computes the actual time relative to current UTC time. This
allows time to always be in UTC if an explicit time zone is not provided.

Parameters
----------
date_string : str
    String representing the date

Returns
-------
datetime.datetime
    Parsed datetime object. None, if the string cannot be parsed.
[ "Parse", "the", "given", "string", "as", "datetime", "object", ".", "This", "parser", "supports", "in", "almost", "any", "string", "formats", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/time.py#L91-L117
train
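A short sketch of the behavior this buys (the input strings are illustrative; dateparser returns None for unparseable input):

import datetime
import dateparser

settings = {"RELATIVE_BASE": datetime.datetime.utcnow()}

# Relative input is anchored to the current UTC time rather than local time
ten_min_ago = dateparser.parse("10min ago", settings=settings)

# Absolute input parses as usual
start = dateparser.parse("2018-07-12 13:14:15", settings=settings)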
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.function_name
def function_name(self):
        """
        Returns the name of the function to invoke. If no function identifier is provided, this method will return
        the name of the only function from the template.

        :return string: Name of the function
        :raises InvokeContextException: If a function identifier is not provided and the template contains more
            than one function
        """
        if self._function_identifier:
            return self._function_identifier

        # Function Identifier is *not* provided. If there is only one function in the template,
        # default to it.

        all_functions = [f for f in self._function_provider.get_all()]
        if len(all_functions) == 1:
            return all_functions[0].name

        # Get all the available function names to print helpful exception message
        all_function_names = [f.name for f in all_functions]

        # There are more functions in the template, and function identifier is not provided, hence raise.
        raise InvokeContextException("You must provide a function identifier (function's Logical ID in the template). "
                                     "Possible options in your template: {}".format(all_function_names))
python
def function_name(self):
        """
        Returns the name of the function to invoke. If no function identifier is provided, this method will return
        the name of the only function from the template.

        :return string: Name of the function
        :raises InvokeContextException: If a function identifier is not provided and the template contains more
            than one function
        """
        if self._function_identifier:
            return self._function_identifier

        # Function Identifier is *not* provided. If there is only one function in the template,
        # default to it.

        all_functions = [f for f in self._function_provider.get_all()]
        if len(all_functions) == 1:
            return all_functions[0].name

        # Get all the available function names to print helpful exception message
        all_function_names = [f.name for f in all_functions]

        # There are more functions in the template, and function identifier is not provided, hence raise.
        raise InvokeContextException("You must provide a function identifier (function's Logical ID in the template). "
                                     "Possible options in your template: {}".format(all_function_names))
[ "def", "function_name", "(", "self", ")", ":", "if", "self", ".", "_function_identifier", ":", "return", "self", ".", "_function_identifier", "# Function Identifier is *not* provided. If there is only one function in the template,", "# default to it.", "all_functions", "=", "[", "f", "for", "f", "in", "self", ".", "_function_provider", ".", "get_all", "(", ")", "]", "if", "len", "(", "all_functions", ")", "==", "1", ":", "return", "all_functions", "[", "0", "]", ".", "name", "# Get all the available function names to print helpful exception message", "all_function_names", "=", "[", "f", ".", "name", "for", "f", "in", "all_functions", "]", "# There are more functions in the template, and function identifier is not provided, hence raise.", "raise", "InvokeContextException", "(", "\"You must provide a function identifier (function's Logical ID in the template). \"", "\"Possible options in your template: {}\"", ".", "format", "(", "all_function_names", ")", ")" ]
Returns the name of the function to invoke. If no function identifier is provided, this method will return
the name of the only function from the template.

:return string: Name of the function
:raises InvokeContextException: If a function identifier is not provided and the template contains more
    than one function
[ "Returns", "name", "of", "the", "function", "to", "invoke", ".", "If", "no", "function", "identifier", "is", "provided", "this", "method", "will", "return", "name", "of", "the", "only", "function", "from", "the", "template" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L157-L180
train
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.local_lambda_runner
def local_lambda_runner(self): """ Returns an instance of the runner capable of running Lambda functions locally :return samcli.commands.local.lib.local_lambda.LocalLambdaRunner: Runner configured to run Lambda functions locally """ layer_downloader = LayerDownloader(self._layer_cache_basedir, self.get_cwd()) image_builder = LambdaImage(layer_downloader, self._skip_pull_image, self._force_image_build) lambda_runtime = LambdaRuntime(self._container_manager, image_builder) return LocalLambdaRunner(local_runtime=lambda_runtime, function_provider=self._function_provider, cwd=self.get_cwd(), env_vars_values=self._env_vars_value, debug_context=self._debug_context)
python
def local_lambda_runner(self): """ Returns an instance of the runner capable of running Lambda functions locally :return samcli.commands.local.lib.local_lambda.LocalLambdaRunner: Runner configured to run Lambda functions locally """ layer_downloader = LayerDownloader(self._layer_cache_basedir, self.get_cwd()) image_builder = LambdaImage(layer_downloader, self._skip_pull_image, self._force_image_build) lambda_runtime = LambdaRuntime(self._container_manager, image_builder) return LocalLambdaRunner(local_runtime=lambda_runtime, function_provider=self._function_provider, cwd=self.get_cwd(), env_vars_values=self._env_vars_value, debug_context=self._debug_context)
[ "def", "local_lambda_runner", "(", "self", ")", ":", "layer_downloader", "=", "LayerDownloader", "(", "self", ".", "_layer_cache_basedir", ",", "self", ".", "get_cwd", "(", ")", ")", "image_builder", "=", "LambdaImage", "(", "layer_downloader", ",", "self", ".", "_skip_pull_image", ",", "self", ".", "_force_image_build", ")", "lambda_runtime", "=", "LambdaRuntime", "(", "self", ".", "_container_manager", ",", "image_builder", ")", "return", "LocalLambdaRunner", "(", "local_runtime", "=", "lambda_runtime", ",", "function_provider", "=", "self", ".", "_function_provider", ",", "cwd", "=", "self", ".", "get_cwd", "(", ")", ",", "env_vars_values", "=", "self", ".", "_env_vars_value", ",", "debug_context", "=", "self", ".", "_debug_context", ")" ]
Returns an instance of the runner capable of running Lambda functions locally :return samcli.commands.local.lib.local_lambda.LocalLambdaRunner: Runner configured to run Lambda functions locally
[ "Returns", "an", "instance", "of", "the", "runner", "capable", "of", "running", "Lambda", "functions", "locally" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L183-L201
train
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.stdout
def stdout(self):
        """
        Returns a stream writer for stdout, to which Lambda function logs are written

        Returns
        -------
        samcli.lib.utils.stream_writer.StreamWriter
            Stream writer for stdout
        """
        stream = self._log_file_handle if self._log_file_handle else osutils.stdout()
        return StreamWriter(stream, self._is_debugging)
python
def stdout(self):
        """
        Returns a stream writer for stdout, to which Lambda function logs are written

        Returns
        -------
        samcli.lib.utils.stream_writer.StreamWriter
            Stream writer for stdout
        """
        stream = self._log_file_handle if self._log_file_handle else osutils.stdout()
        return StreamWriter(stream, self._is_debugging)
[ "def", "stdout", "(", "self", ")", ":", "stream", "=", "self", ".", "_log_file_handle", "if", "self", ".", "_log_file_handle", "else", "osutils", ".", "stdout", "(", ")", "return", "StreamWriter", "(", "stream", ",", "self", ".", "_is_debugging", ")" ]
Returns a stream writer for stdout, to which Lambda function logs are written

Returns
-------
samcli.lib.utils.stream_writer.StreamWriter
    Stream writer for stdout
[ "Returns", "stream", "writer", "for", "stdout", "to", "output", "Lambda", "function", "logs", "to" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L204-L214
train
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.stderr
def stderr(self):
        """
        Returns a stream writer for stderr, to which Lambda function errors are written

        Returns
        -------
        samcli.lib.utils.stream_writer.StreamWriter
            Stream writer for stderr
        """
        stream = self._log_file_handle if self._log_file_handle else osutils.stderr()
        return StreamWriter(stream, self._is_debugging)
python
def stderr(self):
        """
        Returns a stream writer for stderr, to which Lambda function errors are written

        Returns
        -------
        samcli.lib.utils.stream_writer.StreamWriter
            Stream writer for stderr
        """
        stream = self._log_file_handle if self._log_file_handle else osutils.stderr()
        return StreamWriter(stream, self._is_debugging)
[ "def", "stderr", "(", "self", ")", ":", "stream", "=", "self", ".", "_log_file_handle", "if", "self", ".", "_log_file_handle", "else", "osutils", ".", "stderr", "(", ")", "return", "StreamWriter", "(", "stream", ",", "self", ".", "_is_debugging", ")" ]
Returns a stream writer for stderr, to which Lambda function errors are written

Returns
-------
samcli.lib.utils.stream_writer.StreamWriter
    Stream writer for stderr
[ "Returns", "stream", "writer", "for", "stderr", "to", "output", "Lambda", "function", "errors", "to" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L217-L227
train
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext.get_cwd
def get_cwd(self):
        """
        Get the working directory. This is usually relative to the directory that contains the template. If a Docker
        volume location is specified, it takes precedence.

        All Lambda function code paths are resolved relative to this working directory.

        :return string: Working directory
        """

        cwd = os.path.dirname(os.path.abspath(self._template_file))
        if self._docker_volume_basedir:
            cwd = self._docker_volume_basedir

        return cwd
python
def get_cwd(self):
        """
        Get the working directory. This is usually relative to the directory that contains the template. If a Docker
        volume location is specified, it takes precedence.

        All Lambda function code paths are resolved relative to this working directory.

        :return string: Working directory
        """

        cwd = os.path.dirname(os.path.abspath(self._template_file))
        if self._docker_volume_basedir:
            cwd = self._docker_volume_basedir

        return cwd
[ "def", "get_cwd", "(", "self", ")", ":", "cwd", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "self", ".", "_template_file", ")", ")", "if", "self", ".", "_docker_volume_basedir", ":", "cwd", "=", "self", ".", "_docker_volume_basedir", "return", "cwd" ]
Get the working directory. This is usually relative to the directory that contains the template. If a Docker
volume location is specified, it takes precedence.

All Lambda function code paths are resolved relative to this working directory.

:return string: Working directory
[ "Get", "the", "working", "directory", ".", "This", "is", "usually", "relative", "to", "the", "directory", "that", "contains", "the", "template", ".", "If", "a", "Docker", "volume", "location", "is", "specified", "it", "takes", "preference" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L238-L252
train
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext._get_env_vars_value
def _get_env_vars_value(filename):
        """
        If the user provided a file containing values of environment variables, this method will read the file and
        return its value

        :param string filename: Path to file containing environment variable values
        :return dict: Value of environment variables, if provided. None otherwise
        :raises InvokeContextException: If the file was not found or does not contain valid JSON
        """
        if not filename:
            return None

        # Try to read the file and parse it as JSON
        try:

            with open(filename, 'r') as fp:
                return json.load(fp)

        except Exception as ex:
            raise InvokeContextException("Could not read environment variables overrides from file {}: {}".format(
                filename, str(ex)))
python
def _get_env_vars_value(filename):
        """
        If the user provided a file containing values of environment variables, this method will read the file and
        return its value

        :param string filename: Path to file containing environment variable values
        :return dict: Value of environment variables, if provided. None otherwise
        :raises InvokeContextException: If the file was not found or does not contain valid JSON
        """
        if not filename:
            return None

        # Try to read the file and parse it as JSON
        try:

            with open(filename, 'r') as fp:
                return json.load(fp)

        except Exception as ex:
            raise InvokeContextException("Could not read environment variables overrides from file {}: {}".format(
                filename, str(ex)))
[ "def", "_get_env_vars_value", "(", "filename", ")", ":", "if", "not", "filename", ":", "return", "None", "# Try to read the file and parse it as JSON", "try", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fp", ":", "return", "json", ".", "load", "(", "fp", ")", "except", "Exception", "as", "ex", ":", "raise", "InvokeContextException", "(", "\"Could not read environment variables overrides from file {}: {}\"", ".", "format", "(", "filename", ",", "str", "(", "ex", ")", ")", ")" ]
If the user provided a file containing values of environment variables, this method will read the file and
return its value

:param string filename: Path to file containing environment variable values
:return dict: Value of environment variables, if provided. None otherwise
:raises InvokeContextException: If the file was not found or does not contain valid JSON
[ "If", "the", "user", "provided", "a", "file", "containing", "values", "of", "environment", "variables", "this", "method", "will", "read", "the", "file", "and", "return", "its", "value" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L282-L303
train
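A sketch of the happy path; the per-function grouping in the sample JSON follows SAM CLI's documented overrides format, which this record itself does not spell out:

import json

sample = '{"MyFunction": {"TABLE_NAME": "localtable", "STAGE": "dev"}}'
overrides = json.loads(sample)  # what json.load(fp) would return for such a file
print(overrides["MyFunction"]["TABLE_NAME"])  # localtable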
awslabs/aws-sam-cli
samcli/commands/local/cli_common/invoke_context.py
InvokeContext._get_debug_context
def _get_debug_context(debug_port, debug_args, debugger_path):
        """
        Creates a DebugContext if the InvokeContext is in a debugging mode

        Parameters
        ----------
        debug_port : int
            Port to bind the debugger to
        debug_args : str
            Additional arguments passed to the debugger
        debugger_path : str
            Path to the directory of the debugger to mount on Docker

        Returns
        -------
        samcli.commands.local.lib.debug_context.DebugContext
            Object representing the DebugContext

        Raises
        ------
        samcli.commands.local.cli_common.user_exceptions.DebugContextException
            When the debugger_path is not valid
        """
        if debug_port and debugger_path:
            try:
                debugger = Path(debugger_path).resolve(strict=True)
            except OSError as error:
                if error.errno == errno.ENOENT:
                    raise DebugContextException("'{}' could not be found.".format(debugger_path))
                else:
                    raise error

            # We turn off pylint here due to https://github.com/PyCQA/pylint/issues/1660
            if not debugger.is_dir():  # pylint: disable=no-member
                raise DebugContextException("'{}' should be a directory with the debugger in it.".format(debugger_path))
            debugger_path = str(debugger)

        return DebugContext(debug_port=debug_port, debug_args=debug_args, debugger_path=debugger_path)
python
def _get_debug_context(debug_port, debug_args, debugger_path):
        """
        Creates a DebugContext if the InvokeContext is in a debugging mode

        Parameters
        ----------
        debug_port : int
            Port to bind the debugger to
        debug_args : str
            Additional arguments passed to the debugger
        debugger_path : str
            Path to the directory of the debugger to mount on Docker

        Returns
        -------
        samcli.commands.local.lib.debug_context.DebugContext
            Object representing the DebugContext

        Raises
        ------
        samcli.commands.local.cli_common.user_exceptions.DebugContextException
            When the debugger_path is not valid
        """
        if debug_port and debugger_path:
            try:
                debugger = Path(debugger_path).resolve(strict=True)
            except OSError as error:
                if error.errno == errno.ENOENT:
                    raise DebugContextException("'{}' could not be found.".format(debugger_path))
                else:
                    raise error

            # We turn off pylint here due to https://github.com/PyCQA/pylint/issues/1660
            if not debugger.is_dir():  # pylint: disable=no-member
                raise DebugContextException("'{}' should be a directory with the debugger in it.".format(debugger_path))
            debugger_path = str(debugger)

        return DebugContext(debug_port=debug_port, debug_args=debug_args, debugger_path=debugger_path)
[ "def", "_get_debug_context", "(", "debug_port", ",", "debug_args", ",", "debugger_path", ")", ":", "if", "debug_port", "and", "debugger_path", ":", "try", ":", "debugger", "=", "Path", "(", "debugger_path", ")", ".", "resolve", "(", "strict", "=", "True", ")", "except", "OSError", "as", "error", ":", "if", "error", ".", "errno", "==", "errno", ".", "ENOENT", ":", "raise", "DebugContextException", "(", "\"'{}' could not be found.\"", ".", "format", "(", "debugger_path", ")", ")", "else", ":", "raise", "error", "# We turn off pylint here due to https://github.com/PyCQA/pylint/issues/1660", "if", "not", "debugger", ".", "is_dir", "(", ")", ":", "# pylint: disable=no-member", "raise", "DebugContextException", "(", "\"'{}' should be a directory with the debugger in it.\"", ".", "format", "(", "debugger_path", ")", ")", "debugger_path", "=", "str", "(", "debugger", ")", "return", "DebugContext", "(", "debug_port", "=", "debug_port", ",", "debug_args", "=", "debug_args", ",", "debugger_path", "=", "debugger_path", ")" ]
Creates a DebugContext if the InvokeContext is in a debugging mode

Parameters
----------
debug_port : int
    Port to bind the debugger to
debug_args : str
    Additional arguments passed to the debugger
debugger_path : str
    Path to the directory of the debugger to mount on Docker

Returns
-------
samcli.commands.local.lib.debug_context.DebugContext
    Object representing the DebugContext

Raises
------
samcli.commands.local.cli_common.user_exceptions.DebugContextException
    When the debugger_path is not valid
[ "Creates", "a", "DebugContext", "if", "the", "InvokeContext", "is", "in", "a", "debugging", "mode" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/cli_common/invoke_context.py#L319-L356
train
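The strict resolve is what turns a missing path into the OSError branch above. On Python 3.6+, the error surfaces as FileNotFoundError, an OSError subclass carrying errno.ENOENT; a minimal sketch (the path is made up):

from pathlib import Path

try:
    debugger = Path("/opt/my-debugger").resolve(strict=True)
except FileNotFoundError:  # OSError subclass with errno == errno.ENOENT
    debugger = None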
awslabs/aws-sam-cli
samcli/local/docker/attach_api.py
_read_socket
def _read_socket(socket):
    """
    The stdout and stderr data from the container is multiplexed into one stream of responses from the Docker API.
    It follows the protocol described here https://docs.docker.com/engine/api/v1.30/#operation/ContainerAttach.
    The stream starts with an 8 byte header that contains the frame type and also payload size. Following that is the
    actual payload of the given size. Once this payload is read off, we are ready to read the next header.

    This method will follow this protocol to read payloads from the stream and return an iterator that returns
    a tuple containing the frame type and frame data. Callers can handle the data appropriately based on the frame
    type.
        Stdout => Frame Type = 1
        Stderr => Frame Type = 2

    Parameters
    ----------
    socket
        Socket to read responses from

    Yields
    -------
    int
        Type of the stream (1 => stdout, 2 => stderr)
    str
        Data in the stream
    """

    # Keep reading the stream until the stream terminates
    while True:

        try:
            payload_type, payload_size = _read_header(socket)
            if payload_size < 0:
                # Something is wrong with the data stream. Payload size can't be less than zero
                break

            for data in _read_payload(socket, payload_size):
                yield payload_type, data

        except timeout:
            # Timeouts are normal during debug sessions and long running tasks
            LOG.debug("Ignoring docker socket timeout")

        except SocketError:
            # There isn't enough data in the stream. Probably the socket terminated
            break
python
def _read_socket(socket):
    """
    The stdout and stderr data from the container is multiplexed into one stream of responses from the Docker API.
    It follows the protocol described here https://docs.docker.com/engine/api/v1.30/#operation/ContainerAttach.
    The stream starts with an 8 byte header that contains the frame type and also payload size. Following that is the
    actual payload of the given size. Once this payload is read off, we are ready to read the next header.

    This method will follow this protocol to read payloads from the stream and return an iterator that returns
    a tuple containing the frame type and frame data. Callers can handle the data appropriately based on the frame
    type.
        Stdout => Frame Type = 1
        Stderr => Frame Type = 2

    Parameters
    ----------
    socket
        Socket to read responses from

    Yields
    -------
    int
        Type of the stream (1 => stdout, 2 => stderr)
    str
        Data in the stream
    """

    # Keep reading the stream until the stream terminates
    while True:

        try:
            payload_type, payload_size = _read_header(socket)
            if payload_size < 0:
                # Something is wrong with the data stream. Payload size can't be less than zero
                break

            for data in _read_payload(socket, payload_size):
                yield payload_type, data

        except timeout:
            # Timeouts are normal during debug sessions and long running tasks
            LOG.debug("Ignoring docker socket timeout")

        except SocketError:
            # There isn't enough data in the stream. Probably the socket terminated
            break
[ "def", "_read_socket", "(", "socket", ")", ":", "# Keep reading the stream until the stream terminates", "while", "True", ":", "try", ":", "payload_type", ",", "payload_size", "=", "_read_header", "(", "socket", ")", "if", "payload_size", "<", "0", ":", "# Something is wrong with the data stream. Payload size can't be less than zero", "break", "for", "data", "in", "_read_payload", "(", "socket", ",", "payload_size", ")", ":", "yield", "payload_type", ",", "data", "except", "timeout", ":", "# Timeouts are normal during debug sessions and long running tasks", "LOG", ".", "debug", "(", "\"Ignoring docker socket timeout\"", ")", "except", "SocketError", ":", "# There isn't enough data in the stream. Probably the socket terminated", "break" ]
The stdout and stderr data from the container is multiplexed into one stream of responses from the Docker API.
It follows the protocol described here https://docs.docker.com/engine/api/v1.30/#operation/ContainerAttach.
The stream starts with an 8 byte header that contains the frame type and also payload size. Following that is the
actual payload of the given size. Once this payload is read off, we are ready to read the next header.

This method will follow this protocol to read payloads from the stream and return an iterator that returns
a tuple containing the frame type and frame data. Callers can handle the data appropriately based on the frame
type.
    Stdout => Frame Type = 1
    Stderr => Frame Type = 2

Parameters
----------
socket
    Socket to read responses from

Yields
-------
int
    Type of the stream (1 => stdout, 2 => stderr)
str
    Data in the stream
[ "The", "stdout", "and", "stderr", "data", "from", "the", "container", "multiplexed", "into", "one", "stream", "of", "response", "from", "the", "Docker", "API", ".", "It", "follows", "the", "protocol", "described", "here", "https", ":", "//", "docs", ".", "docker", ".", "com", "/", "engine", "/", "api", "/", "v1", ".", "30", "/", "#operation", "/", "ContainerAttach", ".", "The", "stream", "starts", "with", "a", "8", "byte", "header", "that", "contains", "the", "frame", "type", "and", "also", "payload", "size", ".", "Follwing", "that", "is", "the", "actual", "payload", "of", "given", "size", ".", "Once", "you", "read", "off", "this", "payload", "we", "are", "ready", "to", "read", "the", "next", "header", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/attach_api.py#L69-L116
train
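Per the Docker attach protocol linked above, the 8-byte header is one byte of stream type, three zero bytes, and a 4-byte big-endian payload length. A sketch of decoding it with struct; the header bytes are fabricated, and _read_header above presumably does something equivalent, though its implementation is not shown in this record:

import struct

header = b"\x01\x00\x00\x00\x00\x00\x00\x0b"  # stdout frame carrying an 11-byte payload
payload_type, payload_size = struct.unpack(">BxxxL", header)
print(payload_type, payload_size)  # 1 11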
awslabs/aws-sam-cli
samcli/local/docker/attach_api.py
_read_payload
def _read_payload(socket, payload_size):
    """
    From the given socket, reads and yields payload of the given size. With sockets, we don't receive all data at
    once. Therefore this method will yield each time we read some data from the socket, until payload_size has been
    reached or the socket has no more data.

    Parameters
    ----------
    socket
        Socket to read from

    payload_size : int
        Size of the payload to read. Exactly this many bytes are read from the socket before stopping the yield.

    Yields
    -------
    int
        Type of the stream (1 => stdout, 2 => stderr)
    str
        Data in the stream
    """

    remaining = payload_size
    while remaining > 0:

        # Try and read as much as possible
        data = read(socket, remaining)
        if data is None:
            # ``read`` will terminate with an empty string. This is just a transient state where we didn't get any data
            continue

        if len(data) == 0:  # pylint: disable=C1801
            # Empty string. Socket does not have any more data. We are done here even if we haven't read full payload
            break

        remaining -= len(data)
        yield data
python
def _read_payload(socket, payload_size): """ From the given socket, reads and yields payload of the given size. With sockets, we don't receive all data at once. Therefore this method will yield each time we read some data from the socket, until payload_size has been reached or the socket has no more data. Parameters ---------- socket Socket to read from payload_size : int Size of the payload to read. Exactly this many bytes are read from the socket before stopping the yield. Yields ------- str Data read from the stream """ remaining = payload_size while remaining > 0: # Try and read as much as possible data = read(socket, remaining) if data is None: # ``read`` will terminate with an empty string. This is just a transient state where we didn't get any data continue if len(data) == 0: # pylint: disable=C1801 # Empty string. Socket does not have any more data. We are done here even if we haven't read full payload break remaining -= len(data) yield data
[ "def", "_read_payload", "(", "socket", ",", "payload_size", ")", ":", "remaining", "=", "payload_size", "while", "remaining", ">", "0", ":", "# Try and read as much as possible", "data", "=", "read", "(", "socket", ",", "remaining", ")", "if", "data", "is", "None", ":", "# ``read`` will terminate with an empty string. This is just a transient state where we didn't get any data", "continue", "if", "len", "(", "data", ")", "==", "0", ":", "# pylint: disable=C1801", "# Empty string. Socket does not have any more data. We are done here even if we haven't read full payload", "break", "remaining", "-=", "len", "(", "data", ")", "yield", "data" ]
From the given socket, reads and yields payload of the given size. With sockets, we don't receive all data at once. Therefore this method will yield each time we read some data from the socket, until payload_size has been reached or the socket has no more data. Parameters ---------- socket Socket to read from payload_size : int Size of the payload to read. Exactly this many bytes are read from the socket before stopping the yield. Yields ------- str Data read from the stream
[ "From", "the", "given", "socket", "reads", "and", "yields", "payload", "of", "the", "given", "size", ".", "With", "sockets", "we", "don", "t", "receive", "all", "data", "at", "once", ".", "Therefore", "this", "method", "will", "yield", "each", "time", "we", "read", "some", "data", "from", "the", "socket", "until", "the", "payload_size", "has", "reached", "or", "socket", "has", "no", "more", "data", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/docker/attach_api.py#L119-L155
train
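To see the chunking behavior of this loop in isolation, the same logic can be exercised against a stub transport. The sketch below inlines a local read function in place of the docker SDK helper the real module imports, assuming the same contract: read may transiently return None, returns an empty byte string once the stream terminates, and otherwise returns at most the requested number of bytes.

def read_payload(read, socket, payload_size):
    # Same loop as _read_payload, with the transport's read() injected explicitly
    remaining = payload_size
    while remaining > 0:
        data = read(socket, remaining)
        if data is None:
            continue              # transient: nothing received yet, try again
        if len(data) == 0:
            break                 # stream terminated before the full payload arrived
        remaining -= len(data)
        yield data

chunks = [b"Hello, ", b"world!", b""]
stub_read = lambda _sock, n: chunks.pop(0)[:n]       # serves pre-canned chunks
print(b"".join(read_payload(stub_read, None, 13)))   # b'Hello, world!'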
awslabs/aws-sam-cli
samcli/cli/options.py
debug_option
def debug_option(f): """ Configures --debug option for CLI :param f: Callback Function to be passed to Click """ def callback(ctx, param, value): state = ctx.ensure_object(Context) state.debug = value return value return click.option('--debug', expose_value=False, is_flag=True, envvar="SAM_DEBUG", help='Turn on debug logging to print debug message generated by SAM CLI.', callback=callback)(f)
python
def debug_option(f): """ Configures --debug option for CLI :param f: Callback Function to be passed to Click """ def callback(ctx, param, value): state = ctx.ensure_object(Context) state.debug = value return value return click.option('--debug', expose_value=False, is_flag=True, envvar="SAM_DEBUG", help='Turn on debug logging to print debug message generated by SAM CLI.', callback=callback)(f)
[ "def", "debug_option", "(", "f", ")", ":", "def", "callback", "(", "ctx", ",", "param", ",", "value", ")", ":", "state", "=", "ctx", ".", "ensure_object", "(", "Context", ")", "state", ".", "debug", "=", "value", "return", "value", "return", "click", ".", "option", "(", "'--debug'", ",", "expose_value", "=", "False", ",", "is_flag", "=", "True", ",", "envvar", "=", "\"SAM_DEBUG\"", ",", "help", "=", "'Turn on debug logging to print debug message generated by SAM CLI.'", ",", "callback", "=", "callback", ")", "(", "f", ")" ]
Configures --debug option for CLI :param f: Callback Function to be passed to Click
[ "Configures", "--", "debug", "option", "for", "CLI" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/options.py#L11-L27
train
awslabs/aws-sam-cli
samcli/cli/options.py
region_option
def region_option(f): """ Configures --region option for CLI :param f: Callback Function to be passed to Click """ def callback(ctx, param, value): state = ctx.ensure_object(Context) state.region = value return value return click.option('--region', expose_value=False, help='Set the AWS Region of the service (e.g. us-east-1).', callback=callback)(f)
python
def region_option(f): """ Configures --region option for CLI :param f: Callback Function to be passed to Click """ def callback(ctx, param, value): state = ctx.ensure_object(Context) state.region = value return value return click.option('--region', expose_value=False, help='Set the AWS Region of the service (e.g. us-east-1).', callback=callback)(f)
[ "def", "region_option", "(", "f", ")", ":", "def", "callback", "(", "ctx", ",", "param", ",", "value", ")", ":", "state", "=", "ctx", ".", "ensure_object", "(", "Context", ")", "state", ".", "region", "=", "value", "return", "value", "return", "click", ".", "option", "(", "'--region'", ",", "expose_value", "=", "False", ",", "help", "=", "'Set the AWS Region of the service (e.g. us-east-1).'", ",", "callback", "=", "callback", ")", "(", "f", ")" ]
Configures --region option for CLI :param f: Callback Function to be passed to Click
[ "Configures", "--", "region", "option", "for", "CLI" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/options.py#L30-L44
train
awslabs/aws-sam-cli
samcli/cli/options.py
profile_option
def profile_option(f): """ Configures --profile option for CLI :param f: Callback Function to be passed to Click """ def callback(ctx, param, value): state = ctx.ensure_object(Context) state.profile = value return value return click.option('--profile', expose_value=False, help='Select a specific profile from your credential file to get AWS credentials.', callback=callback)(f)
python
def profile_option(f): """ Configures --profile option for CLI :param f: Callback Function to be passed to Click """ def callback(ctx, param, value): state = ctx.ensure_object(Context) state.profile = value return value return click.option('--profile', expose_value=False, help='Select a specific profile from your credential file to get AWS credentials.', callback=callback)(f)
[ "def", "profile_option", "(", "f", ")", ":", "def", "callback", "(", "ctx", ",", "param", ",", "value", ")", ":", "state", "=", "ctx", ".", "ensure_object", "(", "Context", ")", "state", ".", "profile", "=", "value", "return", "value", "return", "click", ".", "option", "(", "'--profile'", ",", "expose_value", "=", "False", ",", "help", "=", "'Select a specific profile from your credential file to get AWS credentials.'", ",", "callback", "=", "callback", ")", "(", "f", ")" ]
Configures --profile option for CLI :param f: Callback Function to be passed to Click
[ "Configures", "--", "profile", "option", "for", "CLI" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/options.py#L47-L61
train
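debug_option, region_option, and profile_option above are three instances of one pattern: the decorator registers a Click option with expose_value=False so the parsed value never reaches the command function, and a callback stashes it on the shared Context object instead. A minimal self-contained sketch of that pattern, using a simplified State class and a made-up --verbose flag rather than samcli's Context:

import click

class State(object):
    def __init__(self):
        self.verbose = False

def verbose_option(f):
    # Same shape as the samcli decorators: the callback writes to shared
    # state instead of passing the value into the command function.
    def callback(ctx, param, value):
        state = ctx.ensure_object(State)
        state.verbose = value
        return value
    return click.option('--verbose', expose_value=False, is_flag=True,
                        callback=callback)(f)

@click.command()
@verbose_option
@click.pass_context
def hello(ctx):
    click.echo("verbose=%s" % ctx.ensure_object(State).verbose)

if __name__ == '__main__':
    hello()   # e.g. `python script.py --verbose` prints verbose=True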
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.resource_not_found
def resource_not_found(function_name): """ Creates a Lambda Service ResourceNotFound Response Parameters ---------- function_name str Name of the function that was requested to be invoked Returns ------- Flask.Response A response object representing the ResourceNotFound Error """ exception_tuple = LambdaErrorResponses.ResourceNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.USER_ERROR, "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(function_name) ), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def resource_not_found(function_name): """ Creates a Lambda Service ResourceNotFound Response Parameters ---------- function_name str Name of the function that was requested to be invoked Returns ------- Flask.Response A response object representing the ResourceNotFound Error """ exception_tuple = LambdaErrorResponses.ResourceNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.USER_ERROR, "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(function_name) ), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "resource_not_found", "(", "function_name", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "ResourceNotFoundException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "LambdaErrorResponses", ".", "USER_ERROR", ",", "\"Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}\"", ".", "format", "(", "function_name", ")", ")", ",", "LambdaErrorResponses", ".", "_construct_headers", "(", "exception_tuple", "[", "0", "]", ")", ",", "exception_tuple", "[", "1", "]", ")" ]
Creates a Lambda Service ResourceNotFound Response Parameters ---------- function_name str Name of the function that was requested to be invoked Returns ------- Flask.Response A response object representing the ResourceNotFound Error
[ "Creates", "a", "Lambda", "Service", "ResourceNotFound", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L39-L62
train
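resource_not_found and the error helpers that follow all delegate to the same two private builders, and neither _construct_error_response_body nor the exception tuples are shown in these records. The sketch below is therefore only an approximation of the moving parts: the header name, the (error_type, status_code) tuple layout, and the JSON body shape are assumptions, not verified samcli internals. The remaining error helpers in this file follow the same pattern with different tuples and messages.

import json

USER_ERROR = "User"
# Assumed layout: (error type for the response headers, HTTP status code)
ResourceNotFoundException = ("ResourceNotFoundException", 404)

def construct_error_response_body(error_type, message):
    # Mirrors the Lambda service error document shape (assumed)
    return json.dumps({"Type": error_type, "Message": message})

def resource_not_found(function_name):
    error_type, status_code = ResourceNotFoundException
    body = construct_error_response_body(
        USER_ERROR,
        "Function not found: arn:aws:lambda:us-west-2:012345678901:function:{}".format(function_name))
    headers = {"x-amzn-errortype": error_type, "Content-Type": "application/json"}
    return body, headers, status_code

print(resource_not_found("HelloWorld"))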
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.invalid_request_content
def invalid_request_content(message): """ Creates a Lambda Service InvalidRequestContent Response Parameters ---------- message str Message to be added to the body of the response Returns ------- Flask.Response A response object representing the InvalidRequestContent Error """ exception_tuple = LambdaErrorResponses.InvalidRequestContentException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, message), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def invalid_request_content(message): """ Creates a Lambda Service InvalidRequestContent Response Parameters ---------- message str Message to be added to the body of the response Returns ------- Flask.Response A response object representing the InvalidRequestContent Error """ exception_tuple = LambdaErrorResponses.InvalidRequestContentException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, message), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "invalid_request_content", "(", "message", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "InvalidRequestContentException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "LambdaErrorResponses", ".", "USER_ERROR", ",", "message", ")", ",", "LambdaErrorResponses", ".", "_construct_headers", "(", "exception_tuple", "[", "0", "]", ")", ",", "exception_tuple", "[", "1", "]", ")" ]
Creates a Lambda Service InvalidRequestContent Response Parameters ---------- message str Message to be added to the body of the response Returns ------- Flask.Response A response object representing the InvalidRequestContent Error
[ "Creates", "a", "Lambda", "Service", "InvalidRequestContent", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L65-L85
train
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.unsupported_media_type
def unsupported_media_type(content_type): """ Creates a Lambda Service UnsupportedMediaType Response Parameters ---------- content_type str Content Type of the request that was made Returns ------- Flask.Response A response object representing the UnsupportedMediaType Error """ exception_tuple = LambdaErrorResponses.UnsupportedMediaTypeException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, "Unsupported content type: {}".format(content_type)), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def unsupported_media_type(content_type): """ Creates a Lambda Service UnsupportedMediaType Response Parameters ---------- content_type str Content Type of the request that was made Returns ------- Flask.Response A response object representing the UnsupportedMediaType Error """ exception_tuple = LambdaErrorResponses.UnsupportedMediaTypeException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.USER_ERROR, "Unsupported content type: {}".format(content_type)), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "unsupported_media_type", "(", "content_type", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "UnsupportedMediaTypeException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "LambdaErrorResponses", ".", "USER_ERROR", ",", "\"Unsupported content type: {}\"", ".", "format", "(", "content_type", ")", ")", ",", "LambdaErrorResponses", ".", "_construct_headers", "(", "exception_tuple", "[", "0", "]", ")", ",", "exception_tuple", "[", "1", "]", ")" ]
Creates a Lambda Service UnsupportedMediaType Response Parameters ---------- content_type str Content Type of the request that was made Returns ------- Flask.Response A response object representing the UnsupportedMediaType Error
[ "Creates", "a", "Lambda", "Service", "UnsupportedMediaType", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L88-L109
train
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.generic_service_exception
def generic_service_exception(*args): """ Creates a Lambda Service Generic ServiceException Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericServiceException Error """ exception_tuple = LambdaErrorResponses.ServiceException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.SERVICE_ERROR, "ServiceException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def generic_service_exception(*args): """ Creates a Lambda Service Generic ServiceException Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericServiceException Error """ exception_tuple = LambdaErrorResponses.ServiceException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.SERVICE_ERROR, "ServiceException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "generic_service_exception", "(", "*", "args", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "ServiceException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "LambdaErrorResponses", ".", "SERVICE_ERROR", ",", "\"ServiceException\"", ")", ",", "LambdaErrorResponses", ".", "_construct_headers", "(", "exception_tuple", "[", "0", "]", ")", ",", "exception_tuple", "[", "1", "]", ")" ]
Creates a Lambda Service Generic ServiceException Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericServiceException Error
[ "Creates", "a", "Lambda", "Service", "Generic", "ServiceException", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L112-L132
train
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.generic_path_not_found
def generic_path_not_found(*args): """ Creates a Lambda Service Generic PathNotFound Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericPathNotFound Error """ exception_tuple = LambdaErrorResponses.PathNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.LOCAL_SERVICE_ERROR, "PathNotFoundException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def generic_path_not_found(*args): """ Creates a Lambda Service Generic PathNotFound Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericPathNotFound Error """ exception_tuple = LambdaErrorResponses.PathNotFoundException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body( LambdaErrorResponses.LOCAL_SERVICE_ERROR, "PathNotFoundException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "generic_path_not_found", "(", "*", "args", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "PathNotFoundException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "LambdaErrorResponses", ".", "LOCAL_SERVICE_ERROR", ",", "\"PathNotFoundException\"", ")", ",", "LambdaErrorResponses", ".", "_construct_headers", "(", "exception_tuple", "[", "0", "]", ")", ",", "exception_tuple", "[", "1", "]", ")" ]
Creates a Lambda Service Generic PathNotFound Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericPathNotFound Error
[ "Creates", "a", "Lambda", "Service", "Generic", "PathNotFound", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L158-L179
train
awslabs/aws-sam-cli
samcli/local/lambda_service/lambda_error_responses.py
LambdaErrorResponses.generic_method_not_allowed
def generic_method_not_allowed(*args): """ Creates a Lambda Service Generic MethodNotAllowed Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericMethodNotAllowed Error """ exception_tuple = LambdaErrorResponses.MethodNotAllowedException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.LOCAL_SERVICE_ERROR, "MethodNotAllowedException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
python
def generic_method_not_allowed(*args): """ Creates a Lambda Service Generic MethodNotAllowed Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericMethodNotAllowed Error """ exception_tuple = LambdaErrorResponses.MethodNotAllowedException return BaseLocalService.service_response( LambdaErrorResponses._construct_error_response_body(LambdaErrorResponses.LOCAL_SERVICE_ERROR, "MethodNotAllowedException"), LambdaErrorResponses._construct_headers(exception_tuple[0]), exception_tuple[1] )
[ "def", "generic_method_not_allowed", "(", "*", "args", ")", ":", "exception_tuple", "=", "LambdaErrorResponses", ".", "MethodNotAllowedException", "return", "BaseLocalService", ".", "service_response", "(", "LambdaErrorResponses", ".", "_construct_error_response_body", "(", "LambdaErrorResponses", ".", "LOCAL_SERVICE_ERROR", ",", "\"MethodNotAllowedException\"", ")", ",", "LambdaErrorResponses", ".", "_construct_headers", "(", "exception_tuple", "[", "0", "]", ")", ",", "exception_tuple", "[", "1", "]", ")" ]
Creates a Lambda Service Generic MethodNotAllowed Response Parameters ---------- args list List of arguments Flask passes to the method Returns ------- Flask.Response A response object representing the GenericMethodNotAllowed Error
[ "Creates", "a", "Lambda", "Service", "Generic", "MethodNotAllowed", "Response" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/lambda_service/lambda_error_responses.py#L182-L203
train
awslabs/aws-sam-cli
samcli/commands/validate/validate.py
do_cli
def do_cli(ctx, template): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ sam_template = _read_sam_file(template) iam_client = boto3.client('iam') validator = SamTemplateValidator(sam_template, ManagedPolicyLoader(iam_client)) try: validator.is_valid() except InvalidSamDocumentException as e: click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg='red') raise InvalidSamTemplateException(str(e)) except NoCredentialsError as e: raise UserException("AWS Credentials are required. Please configure your credentials.") click.secho("{} is a valid SAM Template".format(template), fg='green')
python
def do_cli(ctx, template): """ Implementation of the ``cli`` method, just separated out for unit testing purposes """ sam_template = _read_sam_file(template) iam_client = boto3.client('iam') validator = SamTemplateValidator(sam_template, ManagedPolicyLoader(iam_client)) try: validator.is_valid() except InvalidSamDocumentException as e: click.secho("Template provided at '{}' was invalid SAM Template.".format(template), bg='red') raise InvalidSamTemplateException(str(e)) except NoCredentialsError as e: raise UserException("AWS Credentials are required. Please configure your credentials.") click.secho("{} is a valid SAM Template".format(template), fg='green')
[ "def", "do_cli", "(", "ctx", ",", "template", ")", ":", "sam_template", "=", "_read_sam_file", "(", "template", ")", "iam_client", "=", "boto3", ".", "client", "(", "'iam'", ")", "validator", "=", "SamTemplateValidator", "(", "sam_template", ",", "ManagedPolicyLoader", "(", "iam_client", ")", ")", "try", ":", "validator", ".", "is_valid", "(", ")", "except", "InvalidSamDocumentException", "as", "e", ":", "click", ".", "secho", "(", "\"Template provided at '{}' was invalid SAM Template.\"", ".", "format", "(", "template", ")", ",", "bg", "=", "'red'", ")", "raise", "InvalidSamTemplateException", "(", "str", "(", "e", ")", ")", "except", "NoCredentialsError", "as", "e", ":", "raise", "UserException", "(", "\"AWS Credentials are required. Please configure your credentials.\"", ")", "click", ".", "secho", "(", "\"{} is a valid SAM Template\"", ".", "format", "(", "template", ")", ",", "fg", "=", "'green'", ")" ]
Implementation of the ``cli`` method, just separated out for unit testing purposes
[ "Implementation", "of", "the", "cli", "method", "just", "separated", "out", "for", "unit", "testing", "purposes" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/validate/validate.py#L33-L51
train
awslabs/aws-sam-cli
samcli/commands/validate/validate.py
_read_sam_file
def _read_sam_file(template): """ Reads the file (json and yaml supported) provided and returns the dictionary representation of the file. :param str template: Path to the template file :return dict: Dictionary representing the SAM Template :raises: SamTemplateNotFoundException when the template file does not exist """ if not os.path.exists(template): click.secho("SAM Template Not Found", bg='red') raise SamTemplateNotFoundException("Template at {} is not found".format(template)) with click.open_file(template, 'r') as sam_template: sam_template = yaml_parse(sam_template.read()) return sam_template
python
def _read_sam_file(template): """ Reads the file (json and yaml supported) provided and returns the dictionary representation of the file. :param str template: Path to the template file :return dict: Dictionary representing the SAM Template :raises: SamTemplateNotFoundException when the template file does not exist """ if not os.path.exists(template): click.secho("SAM Template Not Found", bg='red') raise SamTemplateNotFoundException("Template at {} is not found".format(template)) with click.open_file(template, 'r') as sam_template: sam_template = yaml_parse(sam_template.read()) return sam_template
[ "def", "_read_sam_file", "(", "template", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "template", ")", ":", "click", ".", "secho", "(", "\"SAM Template Not Found\"", ",", "bg", "=", "'red'", ")", "raise", "SamTemplateNotFoundException", "(", "\"Template at {} is not found\"", ".", "format", "(", "template", ")", ")", "with", "click", ".", "open_file", "(", "template", ",", "'r'", ")", "as", "sam_template", ":", "sam_template", "=", "yaml_parse", "(", "sam_template", ".", "read", "(", ")", ")", "return", "sam_template" ]
Reads the file (json and yaml supported) provided and returns the dictionary representation of the file. :param str template: Path to the template file :return dict: Dictionary representing the SAM Template :raises: SamTemplateNotFoundException when the template file does not exist
[ "Reads", "the", "file", "(", "json", "and", "yaml", "supported", ")", "provided", "and", "returns", "the", "dictionary", "representation", "of", "the", "file", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/validate/validate.py#L54-L69
train
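Since yaml_parse understands both JSON and YAML, the validate flow accepts either template flavor. A minimal illustration of the dictionary _read_sam_file produces for a tiny template, using plain PyYAML as a stand-in for samcli's yaml_parse (which additionally resolves CloudFormation short-form intrinsics such as !Ref, avoided here):

import yaml

TEMPLATE = """
AWSTemplateFormatVersion: '2010-09-09'
Transform: AWS::Serverless-2016-10-31
Resources:
  HelloWorldFunction:
    Type: AWS::Serverless::Function
    Properties:
      Handler: app.handler
      Runtime: python3.7
      CodeUri: hello_world/
"""

template_dict = yaml.safe_load(TEMPLATE)
print(template_dict["Resources"]["HelloWorldFunction"]["Properties"]["Runtime"])
# -> python3.7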
awslabs/aws-sam-cli
samcli/lib/utils/codeuri.py
resolve_code_path
def resolve_code_path(cwd, codeuri): """ Returns path to the function code resolved based on current working directory. Parameters ---------- cwd str Current working directory codeuri CodeURI of the function. This should contain the path to the function code Returns ------- str Absolute path to the function code """ LOG.debug("Resolving code path. Cwd=%s, CodeUri=%s", cwd, codeuri) # First, let us figure out the current working directory. # If current working directory is not provided, then default to the directory where the CLI is running from if not cwd or cwd == PRESENT_DIR: cwd = os.getcwd() # Make sure cwd is an absolute path cwd = os.path.abspath(cwd) # Next, let us get absolute path of function code. # Codepath is always relative to current working directory # If the path is relative, then construct the absolute version if not os.path.isabs(codeuri): codeuri = os.path.normpath(os.path.join(cwd, codeuri)) return codeuri
python
def resolve_code_path(cwd, codeuri): """ Returns path to the function code resolved based on current working directory. Parameters ---------- cwd str Current working directory codeuri CodeURI of the function. This should contain the path to the function code Returns ------- str Absolute path to the function code """ LOG.debug("Resolving code path. Cwd=%s, CodeUri=%s", cwd, codeuri) # First, let us figure out the current working directory. # If current working directory is not provided, then default to the directory where the CLI is running from if not cwd or cwd == PRESENT_DIR: cwd = os.getcwd() # Make sure cwd is an absolute path cwd = os.path.abspath(cwd) # Next, let us get absolute path of function code. # Codepath is always relative to current working directory # If the path is relative, then construct the absolute version if not os.path.isabs(codeuri): codeuri = os.path.normpath(os.path.join(cwd, codeuri)) return codeuri
[ "def", "resolve_code_path", "(", "cwd", ",", "codeuri", ")", ":", "LOG", ".", "debug", "(", "\"Resolving code path. Cwd=%s, CodeUri=%s\"", ",", "cwd", ",", "codeuri", ")", "# First, let us figure out the current working directory.", "# If current working directory is not provided, then default to the directory where the CLI is running from", "if", "not", "cwd", "or", "cwd", "==", "PRESENT_DIR", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", "# Make sure cwd is an absolute path", "cwd", "=", "os", ".", "path", ".", "abspath", "(", "cwd", ")", "# Next, let us get absolute path of function code.", "# Codepath is always relative to current working directory", "# If the path is relative, then construct the absolute version", "if", "not", "os", ".", "path", ".", "isabs", "(", "codeuri", ")", ":", "codeuri", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "cwd", ",", "codeuri", ")", ")", "return", "codeuri" ]
Returns path to the function code resolved based on current working directory. Parameters ---------- cwd str Current working directory codeuri CodeURI of the function. This should contain the path to the function code Returns ------- str Absolute path to the function code
[ "Returns", "path", "to", "the", "function", "code", "resolved", "based", "on", "current", "working", "directory", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/codeuri.py#L13-L46
train
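The resolution rules above are plain os.path arithmetic, which makes them easy to check with a couple of cases. A small standalone rendition of the same logic, on POSIX-style made-up paths:

import os

def resolve(cwd, codeuri, present_dir="."):
    # Same steps as resolve_code_path: default the cwd, absolutize it,
    # then join and normalize relative code paths.
    if not cwd or cwd == present_dir:
        cwd = os.getcwd()
    cwd = os.path.abspath(cwd)
    if not os.path.isabs(codeuri):
        codeuri = os.path.normpath(os.path.join(cwd, codeuri))
    return codeuri

# A relative CodeUri is joined onto the working directory and normalized
print(resolve("/projects/app", "./src/../hello_world"))   # /projects/app/hello_world
# An absolute CodeUri is returned untouched
print(resolve("/projects/app", "/opt/functions/hello"))   # /opt/functions/hello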
awslabs/aws-sam-cli
samcli/local/apigw/path_converter.py
PathConverter.convert_path_to_flask
def convert_path_to_flask(path): """ Converts a Path from an Api Gateway defined path to one that is accepted by Flask Examples: '/id/{id}' => '/id/<id>' '/{proxy+}' => '/<path:proxy>' :param str path: Path to convert to Flask defined path :return str: Path representing a Flask path """ proxy_sub_path = APIGW_TO_FLASK_REGEX.sub(FLASK_CAPTURE_ALL_PATH, path) # Replace the '{' and '}' with '<' and '>' respectively return proxy_sub_path.replace(LEFT_BRACKET, LEFT_ANGLE_BRACKET).replace(RIGHT_BRACKET, RIGHT_ANGLE_BRACKET)
python
def convert_path_to_flask(path): """ Converts a Path from an Api Gateway defined path to one that is accepted by Flask Examples: '/id/{id}' => '/id/<id>' '/{proxy+}' => '/<path:proxy>' :param str path: Path to convert to Flask defined path :return str: Path representing a Flask path """ proxy_sub_path = APIGW_TO_FLASK_REGEX.sub(FLASK_CAPTURE_ALL_PATH, path) # Replace the '{' and '}' with '<' and '>' respectively return proxy_sub_path.replace(LEFT_BRACKET, LEFT_ANGLE_BRACKET).replace(RIGHT_BRACKET, RIGHT_ANGLE_BRACKET)
[ "def", "convert_path_to_flask", "(", "path", ")", ":", "proxy_sub_path", "=", "APIGW_TO_FLASK_REGEX", ".", "sub", "(", "FLASK_CAPTURE_ALL_PATH", ",", "path", ")", "# Replace the '{' and '}' with '<' and '>' respectively", "return", "proxy_sub_path", ".", "replace", "(", "LEFT_BRACKET", ",", "LEFT_ANGLE_BRACKET", ")", ".", "replace", "(", "RIGHT_BRACKET", ",", "RIGHT_ANGLE_BRACKET", ")" ]
Converts a Path from an Api Gateway defined path to one that is accepted by Flask Examples: '/id/{id}' => '/id/<id>' '/{proxy+}' => '/<path:proxy>' :param str path: Path to convert to Flask defined path :return str: Path representing a Flask path
[ "Converts", "a", "Path", "from", "an", "Api", "Gateway", "defined", "path", "to", "one", "that", "is", "accepted", "by", "Flask" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/path_converter.py#L37-L52
train
awslabs/aws-sam-cli
samcli/local/apigw/path_converter.py
PathConverter.convert_path_to_api_gateway
def convert_path_to_api_gateway(path): """ Converts a Path from a Flask defined path to one that is accepted by Api Gateway Examples: '/id/<id>' => '/id/{id}' '/<path:proxy>' => '/{proxy+}' :param str path: Path to convert to Api Gateway defined path :return str: Path representing an Api Gateway path """ proxy_sub_path = FLASK_TO_APIGW_REGEX.sub(PROXY_PATH_PARAMS, path) # Replace the '<' and '>' with '{' and '}' respectively return proxy_sub_path.replace(LEFT_ANGLE_BRACKET, LEFT_BRACKET).replace(RIGHT_ANGLE_BRACKET, RIGHT_BRACKET)
python
def convert_path_to_api_gateway(path): """ Converts a Path from a Flask defined path to one that is accepted by Api Gateway Examples: '/id/<id>' => '/id/{id}' '/<path:proxy>' => '/{proxy+}' :param str path: Path to convert to Api Gateway defined path :return str: Path representing an Api Gateway path """ proxy_sub_path = FLASK_TO_APIGW_REGEX.sub(PROXY_PATH_PARAMS, path) # Replace the '<' and '>' with '{' and '}' respectively return proxy_sub_path.replace(LEFT_ANGLE_BRACKET, LEFT_BRACKET).replace(RIGHT_ANGLE_BRACKET, RIGHT_BRACKET)
[ "def", "convert_path_to_api_gateway", "(", "path", ")", ":", "proxy_sub_path", "=", "FLASK_TO_APIGW_REGEX", ".", "sub", "(", "PROXY_PATH_PARAMS", ",", "path", ")", "# Replace the '<' and '>' with '{' and '}' respectively", "return", "proxy_sub_path", ".", "replace", "(", "LEFT_ANGLE_BRACKET", ",", "LEFT_BRACKET", ")", ".", "replace", "(", "RIGHT_ANGLE_BRACKET", ",", "RIGHT_BRACKET", ")" ]
Converts a Path from a Flask defined path to one that is accepted by Api Gateway Examples: '/id/<id>' => '/id/{id}' '/<path:proxy>' => '/{proxy+}' :param str path: Path to convert to Api Gateway defined path :return str: Path representing an Api Gateway path
[ "Converts", "a", "Path", "from", "a", "Flask", "defined", "path", "to", "one", "that", "is", "accepted", "by", "Api", "Gateway" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/local/apigw/path_converter.py#L55-L70
train
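Both converters boil down to one regex substitution for the greedy proxy parameter plus a bracket swap for ordinary parameters. The module-level regexes and bracket constants are not included in these records, so the sketch below uses plausible stand-ins for APIGW_TO_FLASK_REGEX and FLASK_TO_APIGW_REGEX to show the round trip:

import re

# Hypothetical stand-ins, not the verified samcli constants
APIGW_TO_FLASK_REGEX = re.compile(r"{(\w+)\+}")      # matches '{proxy+}'
FLASK_TO_APIGW_REGEX = re.compile(r"<path:(\w+)>")   # matches '<path:proxy>'

def to_flask(path):
    path = APIGW_TO_FLASK_REGEX.sub(r"<path:\1>", path)
    return path.replace("{", "<").replace("}", ">")

def to_api_gateway(path):
    path = FLASK_TO_APIGW_REGEX.sub(r"{\1+}", path)
    return path.replace("<", "{").replace(">", "}")

print(to_flask("/id/{id}"))             # /id/<id>
print(to_flask("/{proxy+}"))            # /<path:proxy>
print(to_api_gateway("/<path:proxy>"))  # /{proxy+}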
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/integration_uri.py
LambdaUri.get_function_name
def get_function_name(integration_uri): """ Gets the name of the function from the Integration URI ARN. This is a best effort method which returns None if the function name could not be parsed. This can happen when the ARN is an intrinsic function which is too complex or the ARN is not a Lambda integration. Parameters ---------- integration_uri : basestring or dict Integration URI data extracted from Swagger dictionary. This could be a string of the ARN or an intrinsic function that will resolve to the ARN Returns ------- basestring or None If the function name could be parsed out of the Integration URI ARN. None, otherwise """ arn = LambdaUri._get_function_arn(integration_uri) LOG.debug("Extracted Function ARN: %s", arn) return LambdaUri._get_function_name_from_arn(arn)
python
def get_function_name(integration_uri): """ Gets the name of the function from the Integration URI ARN. This is a best effort method which returns None if the function name could not be parsed. This can happen when the ARN is an intrinsic function which is too complex or the ARN is not a Lambda integration. Parameters ---------- integration_uri : basestring or dict Integration URI data extracted from Swagger dictionary. This could be a string of the ARN or an intrinsic function that will resolve to the ARN Returns ------- basestring or None If the function name could be parsed out of the Integration URI ARN. None, otherwise """ arn = LambdaUri._get_function_arn(integration_uri) LOG.debug("Extracted Function ARN: %s", arn) return LambdaUri._get_function_name_from_arn(arn)
[ "def", "get_function_name", "(", "integration_uri", ")", ":", "arn", "=", "LambdaUri", ".", "_get_function_arn", "(", "integration_uri", ")", "LOG", ".", "debug", "(", "\"Extracted Function ARN: %s\"", ",", "arn", ")", "return", "LambdaUri", ".", "_get_function_name_from_arn", "(", "arn", ")" ]
Gets the name of the function from the Integration URI ARN. This is a best effort method which returns None if the function name could not be parsed. This can happen when the ARN is an intrinsic function which is too complex or the ARN is not a Lambda integration. Parameters ---------- integration_uri : basestring or dict Integration URI data extracted from Swagger dictionary. This could be a string of the ARN or an intrinsic function that will resolve to the ARN Returns ------- basestring or None If the function name could be parsed out of the Integration URI ARN. None, otherwise
[ "Gets", "the", "name", "of", "the", "function", "from", "the", "Integration", "URI", "ARN", ".", "This", "is", "a", "best", "effort", "service", "which", "returns", "None", "if", "function", "name", "could", "not", "be", "parsed", ".", "This", "can", "happen", "when", "the", "ARN", "is", "an", "intrinsic", "function", "which", "is", "too", "complex", "or", "the", "ARN", "is", "not", "a", "Lambda", "integration", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/integration_uri.py#L42-L64
train
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/integration_uri.py
LambdaUri._get_function_arn
def _get_function_arn(uri_data): """ Integration URI can be expressed in various shapes and forms. This method normalizes the Integration URI ARN and returns the Lambda Function ARN. Here are the different forms of Integration URI ARN: - String: - Fully resolved ARN - ARN with Stage Variables: Ex: arn:aws:apigateway:ap-southeast-2:lambda:path/2015-03-31/functions/arn:aws:lambda:ap-southeast-2:123456789012:function:${stageVariables.PostFunctionName}/invocations # NOQA - Dictionary: Usually contains intrinsic functions - Fn::Sub: Example: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } - Fn::Join: **Unsupported**. It is very hard to combine the joins into one string especially when certain properties are resolved only at runtime. - Ref, Fn::GetAtt: **Unsupported**. Impossible to use these intrinsics with integration URI. CFN doesn't support this functionality. Note ~~~~ This method supports only a very restricted subset of intrinsic functions with Swagger document. This is the best we can do without implementing a full blown intrinsic function resolution module. Parameters ---------- uri_data : string or dict Value of Integration URI. It can either be a string or an intrinsic function that resolves to a string Returns ------- basestring or None Lambda Function ARN extracted from Integration URI. None, if it cannot get function Arn """ if not uri_data: return None if LambdaUri._is_sub_intrinsic(uri_data): uri_data = LambdaUri._resolve_fn_sub(uri_data) LOG.debug("Resolved Sub intrinsic function: %s", uri_data) # Even after processing intrinsics, this is not a string. Give up. if not isinstance(uri_data, string_types): LOG.debug("This Integration URI format is not supported: %s", uri_data) return None # uri_data is a string. # Let's check if it is actually a Lambda Integration URI and if so, extract the Function ARN matches = re.match(LambdaUri._REGEX_GET_FUNCTION_ARN, uri_data) if not matches or not matches.groups(): LOG.debug("Ignoring Integration URI because it is not a Lambda Function integration: %s", uri_data) return None groups = matches.groups() return groups[0]
python
def _get_function_arn(uri_data): """ Integration URI can be expressed in various shapes and forms. This method normalizes the Integration URI ARN and returns the Lambda Function ARN. Here are the different forms of Integration URI ARN: - String: - Fully resolved ARN - ARN with Stage Variables: Ex: arn:aws:apigateway:ap-southeast-2:lambda:path/2015-03-31/functions/arn:aws:lambda:ap-southeast-2:123456789012:function:${stageVariables.PostFunctionName}/invocations # NOQA - Dictionary: Usually contains intrinsic functions - Fn::Sub: Example: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } - Fn::Join: **Unsupported**. It is very hard to combine the joins into one string especially when certain properties are resolved only at runtime. - Ref, Fn::GetAtt: **Unsupported**. Impossible to use these intrinsics with integration URI. CFN doesn't support this functionality. Note ~~~~ This method supports only a very restricted subset of intrinsic functions with Swagger document. This is the best we can do without implementing a full blown intrinsic function resolution module. Parameters ---------- uri_data : string or dict Value of Integration URI. It can either be a string or an intrinsic function that resolves to a string Returns ------- basestring or None Lambda Function ARN extracted from Integration URI. None, if it cannot get function Arn """ if not uri_data: return None if LambdaUri._is_sub_intrinsic(uri_data): uri_data = LambdaUri._resolve_fn_sub(uri_data) LOG.debug("Resolved Sub intrinsic function: %s", uri_data) # Even after processing intrinsics, this is not a string. Give up. if not isinstance(uri_data, string_types): LOG.debug("This Integration URI format is not supported: %s", uri_data) return None # uri_data is a string. # Let's check if it is actually a Lambda Integration URI and if so, extract the Function ARN matches = re.match(LambdaUri._REGEX_GET_FUNCTION_ARN, uri_data) if not matches or not matches.groups(): LOG.debug("Ignoring Integration URI because it is not a Lambda Function integration: %s", uri_data) return None groups = matches.groups() return groups[0]
[ "def", "_get_function_arn", "(", "uri_data", ")", ":", "if", "not", "uri_data", ":", "return", "None", "if", "LambdaUri", ".", "_is_sub_intrinsic", "(", "uri_data", ")", ":", "uri_data", "=", "LambdaUri", ".", "_resolve_fn_sub", "(", "uri_data", ")", "LOG", ".", "debug", "(", "\"Resolved Sub intrinsic function: %s\"", ",", "uri_data", ")", "# Even after processing intrinsics, this is not a string. Give up.", "if", "not", "isinstance", "(", "uri_data", ",", "string_types", ")", ":", "LOG", ".", "debug", "(", "\"This Integration URI format is not supported: %s\"", ",", "uri_data", ")", "return", "None", "# uri_data is a string.", "# Let's check if it is actually a Lambda Integration URI and if so, extract the Function ARN", "matches", "=", "re", ".", "match", "(", "LambdaUri", ".", "_REGEX_GET_FUNCTION_ARN", ",", "uri_data", ")", "if", "not", "matches", "or", "not", "matches", ".", "groups", "(", ")", ":", "LOG", ".", "debug", "(", "\"Ignoring Integration URI because it is not a Lambda Function integration: %s\"", ",", "uri_data", ")", "return", "None", "groups", "=", "matches", ".", "groups", "(", ")", "return", "groups", "[", "0", "]" ]
Integration URI can be expressed in various shapes and forms. This method normalizes the Integration URI ARN and returns the Lambda Function ARN. Here are the different forms of Integration URI ARN: - String: - Fully resolved ARN - ARN with Stage Variables: Ex: arn:aws:apigateway:ap-southeast-2:lambda:path/2015-03-31/functions/arn:aws:lambda:ap-southeast-2:123456789012:function:${stageVariables.PostFunctionName}/invocations # NOQA - Dictionary: Usually contains intrinsic functions - Fn::Sub: Example: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } - Fn::Join: **Unsupported**. It is very hard to combine the joins into one string especially when certain properties are resolved only at runtime. - Ref, Fn::GetAtt: **Unsupported**. Impossible to use these intrinsics with integration URI. CFN doesn't support this functionality. Note ~~~~ This method supports only a very restricted subset of intrinsic functions with Swagger document. This is the best we can do without implementing a full blown intrinsic function resolution module. Parameters ---------- uri_data : string or dict Value of Integration URI. It can either be a string or an intrinsic function that resolves to a string Returns ------- basestring or None Lambda Function ARN extracted from Integration URI. None, if it cannot get function Arn
[ "Integration", "URI", "can", "be", "expressed", "in", "various", "shapes", "and", "forms", ".", "This", "method", "normalizes", "the", "Integration", "URI", "ARN", "and", "returns", "the", "Lambda", "Function", "ARN", ".", "Here", "are", "the", "different", "forms", "of", "Integration", "URI", "ARN", ":" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/integration_uri.py#L67-L128
train
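_REGEX_GET_FUNCTION_ARN itself is not part of this record, so the pattern below is only an illustrative stand-in with the same intent: pull the embedded Lambda function ARN out of an API Gateway Lambda integration URI.

import re

# Illustrative pattern, not the verified samcli constant
GET_FUNCTION_ARN = re.compile(
    r"arn:aws:apigateway:[^:]+:lambda:path/2015-03-31/functions/(.*)/invocations")

uri = ("arn:aws:apigateway:us-east-1:lambda:path/2015-03-31/functions/"
       "arn:aws:lambda:us-east-1:123456789012:function:HelloWorld/invocations")
match = GET_FUNCTION_ARN.match(uri)
print(match.group(1))
# -> arn:aws:lambda:us-east-1:123456789012:function:HelloWorld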
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/integration_uri.py
LambdaUri._get_function_name_from_arn
def _get_function_name_from_arn(function_arn): """ Given the integration ARN, extract the Lambda function name from the ARN. If there are stage variables, or other unsupported formats, this function will return None. Parameters ---------- function_arn : basestring or None Function ARN from the swagger document Returns ------- basestring or None Function name of this integration. None if the ARN is not parsable """ if not function_arn: return None matches = re.match(LambdaUri._REGEX_GET_FUNCTION_NAME, function_arn) if not matches or not matches.groups(): LOG.debug("No Lambda function ARN defined for integration containing ARN %s", function_arn) return None groups = matches.groups() maybe_function_name = groups[0] # This regex has only one group match # Function name could be a real name or a stage variable or some unknown format if re.match(LambdaUri._REGEX_STAGE_VARIABLE, maybe_function_name): # yes, this is a stage variable LOG.debug("Stage variables are not supported. Ignoring integration with function ARN %s", function_arn) return None elif re.match(LambdaUri._REGEX_VALID_FUNCTION_NAME, maybe_function_name): # Yes, this is a real function name return maybe_function_name # Some unknown format LOG.debug("Ignoring integration ARN. Unable to parse Function Name from function arn %s", function_arn)
python
def _get_function_name_from_arn(function_arn): """ Given the integration ARN, extract the Lambda function name from the ARN. If there are stage variables, or other unsupported formats, this function will return None. Parameters ---------- function_arn : basestring or None Function ARN from the swagger document Returns ------- basestring or None Function name of this integration. None if the ARN is not parsable """ if not function_arn: return None matches = re.match(LambdaUri._REGEX_GET_FUNCTION_NAME, function_arn) if not matches or not matches.groups(): LOG.debug("No Lambda function ARN defined for integration containing ARN %s", function_arn) return None groups = matches.groups() maybe_function_name = groups[0] # This regex has only one group match # Function name could be a real name or a stage variable or some unknown format if re.match(LambdaUri._REGEX_STAGE_VARIABLE, maybe_function_name): # yes, this is a stage variable LOG.debug("Stage variables are not supported. Ignoring integration with function ARN %s", function_arn) return None elif re.match(LambdaUri._REGEX_VALID_FUNCTION_NAME, maybe_function_name): # Yes, this is a real function name return maybe_function_name # Some unknown format LOG.debug("Ignoring integration ARN. Unable to parse Function Name from function arn %s", function_arn)
[ "def", "_get_function_name_from_arn", "(", "function_arn", ")", ":", "if", "not", "function_arn", ":", "return", "None", "matches", "=", "re", ".", "match", "(", "LambdaUri", ".", "_REGEX_GET_FUNCTION_NAME", ",", "function_arn", ")", "if", "not", "matches", "or", "not", "matches", ".", "groups", "(", ")", ":", "LOG", ".", "debug", "(", "\"No Lambda function ARN defined for integration containing ARN %s\"", ",", "function_arn", ")", "return", "None", "groups", "=", "matches", ".", "groups", "(", ")", "maybe_function_name", "=", "groups", "[", "0", "]", "# This regex has only one group match", "# Function name could be a real name or a stage variable or some unknown format", "if", "re", ".", "match", "(", "LambdaUri", ".", "_REGEX_STAGE_VARIABLE", ",", "maybe_function_name", ")", ":", "# yes, this is a stage variable", "LOG", ".", "debug", "(", "\"Stage variables are not supported. Ignoring integration with function ARN %s\"", ",", "function_arn", ")", "return", "None", "elif", "re", ".", "match", "(", "LambdaUri", ".", "_REGEX_VALID_FUNCTION_NAME", ",", "maybe_function_name", ")", ":", "# Yes, this is a real function name", "return", "maybe_function_name", "# Some unknown format", "LOG", ".", "debug", "(", "\"Ignoring integration ARN. Unable to parse Function Name from function arn %s\"", ",", "function_arn", ")" ]
Given the integration ARN, extract the Lambda function name from the ARN. If there are stage variables, or other unsupported formats, this function will return None. Parameters ---------- function_arn : basestring or None Function ARN from the swagger document Returns ------- basestring or None Function name of this integration. None if the ARN is not parsable
[ "Given", "the", "integration", "ARN", "extract", "the", "Lambda", "function", "name", "from", "the", "ARN", ".", "If", "there", "are", "stage", "variables", "or", "other", "unsupported", "formats", "this", "function", "will", "return", "None", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/integration_uri.py#L131-L170
train
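The two follow-up regexes classify whatever sat in the function-name slot of the ARN: a usable name, a stage variable, or something unparseable. With illustrative stand-ins for _REGEX_STAGE_VARIABLE and _REGEX_VALID_FUNCTION_NAME (the real constants are not in this record):

import re

STAGE_VARIABLE = re.compile(r"\$\{stageVariables\..+\}")   # assumed shape
VALID_FUNCTION_NAME = re.compile(r"^[A-Za-z0-9_-]+$")      # assumed shape

def classify(name):
    if STAGE_VARIABLE.match(name):
        return None        # stage variables are unsupported
    if VALID_FUNCTION_NAME.match(name):
        return name        # a real, usable function name
    return None            # unknown format, ignored

print(classify("HelloWorld"))                           # HelloWorld
print(classify("${stageVariables.PostFunctionName}"))   # None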
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/integration_uri.py
LambdaUri._resolve_fn_sub
def _resolve_fn_sub(uri_data): """ Tries to resolve an Integration URI which contains Fn::Sub intrinsic function. This method tries to resolve and produce a string output. Example: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } Fn::Sub Processing: ~~~~~~~~~~~~~~~~~~ If this is a Fn::Sub, resolve as follows: 1. Get the ARN String: - If Sub is using the array syntax, then use the first element, which is a string. - If Sub is using string syntax, then just use the string. 2. If there is a ${XXX.Arn} then replace it with a dummy ARN 3. Otherwise skip it .. code: Input: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } Output: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/arn:aws:lambda:${AWS::Region}:123456789012:function:LambdaFunction/invocations" # NOQA Note ~~~~ This method supports only a very restricted subset of intrinsic functions with Swagger document. This is the best we can do without implementing a full blown intrinsic function resolution module. Parameters ---------- uri_data : string or dict Value of Integration URI. It can either be a string or an intrinsic function that resolves to a string Returns ------- string Integration URI as a string, if we were able to resolve the Sub intrinsic dict Input data is returned unmodified if we are unable to resolve the intrinsic """ # Try the short form of Fn::Sub syntax where the value is the ARN arn = uri_data[LambdaUri._FN_SUB] if isinstance(arn, list): # This is the long form of Fn::Sub syntax # # { # "Fn::Sub":[ "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${MyARn}/invocations", # {"MyARn": {"Ref": "MyFunction"}} # ] # } # # Get the ARN out of the list arn = arn[0] if not isinstance(arn, string_types): # Even after all the processing, ARN is still not a string. Probably customer provided wrong syntax # for Fn::Sub. Let's skip this altogether LOG.debug("Unable to resolve Fn::Sub value for integration URI: %s", uri_data) return uri_data # Now finally we got the ARN string. Let us try to resolve it. # We only support value of type ${XXX.Arn} or ${YYY.Alias}. The `.Alias` syntax is a SAM specific intrinsic # to get ARN of Lambda Alias when using DeploymentPreference lambda_function_arn_template = r'arn:aws:lambda:${AWS::Region}:123456789012:function:\1' return re.sub(LambdaUri._REGEX_SUB_FUNCTION_ARN, # Find all ${blah} patterns # Replace with Lambda Function ARN, where function name is from pattern lambda_function_arn_template, arn)
python
def _resolve_fn_sub(uri_data): """ Tries to resolve an Integration URI which contains Fn::Sub intrinsic function. This method tries to resolve and produce a string output. Example: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } Fn::Sub Processing: ~~~~~~~~~~~~~~~~~~ If this is a Fn::Sub, resolve as follows: 1. Get the ARN String: - If Sub is using the array syntax, then use the first element, which is a string. - If Sub is using string syntax, then just use the string. 2. If there is a ${XXX.Arn} then replace it with a dummy ARN 3. Otherwise skip it .. code: Input: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } Output: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/arn:aws:lambda:${AWS::Region}:123456789012:function:LambdaFunction/invocations" # NOQA Note ~~~~ This method supports only a very restricted subset of intrinsic functions with Swagger document. This is the best we can do without implementing a full blown intrinsic function resolution module. Parameters ---------- uri_data : string or dict Value of Integration URI. It can either be a string or an intrinsic function that resolves to a string Returns ------- string Integration URI as a string, if we were able to resolve the Sub intrinsic dict Input data is returned unmodified if we are unable to resolve the intrinsic """ # Try the short form of Fn::Sub syntax where the value is the ARN arn = uri_data[LambdaUri._FN_SUB] if isinstance(arn, list): # This is the long form of Fn::Sub syntax # # { # "Fn::Sub":[ "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${MyARn}/invocations", # {"MyARn": {"Ref": "MyFunction"}} # ] # } # # Get the ARN out of the list arn = arn[0] if not isinstance(arn, string_types): # Even after all the processing, ARN is still not a string. Probably customer provided wrong syntax # for Fn::Sub. Let's skip this altogether LOG.debug("Unable to resolve Fn::Sub value for integration URI: %s", uri_data) return uri_data # Now finally we got the ARN string. Let us try to resolve it. # We only support value of type ${XXX.Arn} or ${YYY.Alias}. The `.Alias` syntax is a SAM specific intrinsic # to get ARN of Lambda Alias when using DeploymentPreference lambda_function_arn_template = r'arn:aws:lambda:${AWS::Region}:123456789012:function:\1' return re.sub(LambdaUri._REGEX_SUB_FUNCTION_ARN, # Find all ${blah} patterns # Replace with Lambda Function ARN, where function name is from pattern lambda_function_arn_template, arn)
[ "def", "_resolve_fn_sub", "(", "uri_data", ")", ":", "# Try the short form of Fn::Sub syntax where the value is the ARN", "arn", "=", "uri_data", "[", "LambdaUri", ".", "_FN_SUB", "]", "if", "isinstance", "(", "arn", ",", "list", ")", ":", "# This is the long form of Fn::Sub syntax", "#", "# {", "# \"Fn::Sub\":[ \"arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${MyARn}/invocations\",", "# {\"MyARn\": {\"Ref\": MyFunction\"}", "# ]", "# }", "#", "# Get the ARN out of the list", "arn", "=", "arn", "[", "0", "]", "if", "not", "isinstance", "(", "arn", ",", "string_types", ")", ":", "# Even after all the processing, ARN is still not a string. Probably customer provided wrong syntax", "# for Fn::Sub. Let's skip this altogether", "LOG", ".", "debug", "(", "\"Unable to resolve Fn::Sub value for integration URI: %s\"", ",", "uri_data", ")", "return", "uri_data", "# Now finally we got the ARN string. Let us try to resolve it.", "# We only support value of type ${XXX.Arn} or ${YYY.Alias}. The `.Alias` syntax is a SAM specific intrinsic", "# to get ARN of Lambda Alias when using DeploymentPreference", "lambda_function_arn_template", "=", "r'arn:aws:lambda:${AWS::Region}:123456789012:function:\\1'", "return", "re", ".", "sub", "(", "LambdaUri", ".", "_REGEX_SUB_FUNCTION_ARN", ",", "# Find all ${blah} patterns", "# Replace with Lambda Function ARN, where function name is from pattern", "lambda_function_arn_template", ",", "arn", ")" ]
Tries to resolve an Integration URI which contains Fn::Sub intrinsic function. This method tries to resolve and produce a string output. Example: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } Fn::Sub Processing: ~~~~~~~~~~~~~~~~~~ If this is a Fn::Sub, resolve as follows: 1. Get the ARN String: - If Sub is using the array syntax, then use the first element, which is a string. - If Sub is using string syntax, then just use the string. 2. If there is a ${XXX.Arn} then replace it with a dummy ARN 3. Otherwise skip it .. code: Input: { "Fn::Sub": "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${LambdaFunction.Arn}/invocations" } Output: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/arn:aws:lambda:${AWS::Region}:123456789012:function:LambdaFunction/invocations" # NOQA Note ~~~~ This method supports only a very restricted subset of intrinsic functions with Swagger document. This is the best we can do without implementing a full blown intrinsic function resolution module. Parameters ---------- uri_data : string or dict Value of Integration URI. It can either be a string or an intrinsic function that resolves to a string Returns ------- string Integration URI as a string, if we were able to resolve the Sub intrinsic dict Input data is returned unmodified if we are unable to resolve the intrinsic
[ "Tries", "to", "resolve", "an", "Integration", "URI", "which", "contains", "Fn", "::", "Sub", "intrinsic", "function", ".", "This", "method", "tries", "to", "resolve", "and", "produce", "a", "string", "output", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/integration_uri.py#L173-L250
train
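The substitution performed by _resolve_fn_sub above can be illustrated standalone. A minimal sketch, assuming a stand-in regex of the form ${Name.Arn}/${Name.Alias} (the real LambdaUri._REGEX_SUB_FUNCTION_ARN pattern is not shown in this record):

    import re

    # Hypothetical stand-in for LambdaUri._REGEX_SUB_FUNCTION_ARN
    REGEX_SUB_FUNCTION_ARN = r"\$\{([A-Za-z0-9]+)\.(?:Arn|Alias)\}"

    arn = ("arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31"
           "/functions/${LambdaFunction.Arn}/invocations")

    # ${LambdaFunction.Arn} is replaced with a dummy function ARN;
    # ${AWS::Region} does not match the pattern and is left untouched
    print(re.sub(REGEX_SUB_FUNCTION_ARN,
                 r"arn:aws:lambda:${AWS::Region}:123456789012:function:\1",
                 arn))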
awslabs/aws-sam-cli
samcli/commands/local/lib/swagger/integration_uri.py
LambdaUri._is_sub_intrinsic
def _is_sub_intrinsic(data):
    """
    Is this input data a Fn::Sub intrinsic function?

    Parameters
    ----------
    data
        Data to check

    Returns
    -------
    bool
        True if the data is a Fn::Sub intrinsic function
    """
    return isinstance(data, dict) and len(data) == 1 and LambdaUri._FN_SUB in data
python
def _is_sub_intrinsic(data):
    """
    Is this input data a Fn::Sub intrinsic function?

    Parameters
    ----------
    data
        Data to check

    Returns
    -------
    bool
        True if the data is a Fn::Sub intrinsic function
    """
    return isinstance(data, dict) and len(data) == 1 and LambdaUri._FN_SUB in data
[ "def", "_is_sub_intrinsic", "(", "data", ")", ":", "return", "isinstance", "(", "data", ",", "dict", ")", "and", "len", "(", "data", ")", "==", "1", "and", "LambdaUri", ".", "_FN_SUB", "in", "data" ]
Is this input data a Fn::Sub intrinsic function?

Parameters
----------
data
    Data to check

Returns
-------
bool
    True if the data is a Fn::Sub intrinsic function
[ "Is", "this", "input", "data", "a", "Fn", "::", "Sub", "intrinsic", "function" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/swagger/integration_uri.py#L253-L267
train
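Since _is_sub_intrinsic is a simple predicate, its behavior is easy to demonstrate standalone (assuming LambdaUri._FN_SUB is the literal string "Fn::Sub"):

    def is_sub_intrinsic(data):
        return isinstance(data, dict) and len(data) == 1 and "Fn::Sub" in data

    print(is_sub_intrinsic({"Fn::Sub": "arn:aws:..."}))              # True
    print(is_sub_intrinsic({"Fn::Sub": "arn:aws:...", "Other": 1}))  # False: must be the only key
    print(is_sub_intrinsic("arn:aws:..."))                           # False: not a dict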
awslabs/aws-sam-cli
samcli/commands/local/invoke/cli.py
do_cli
def do_cli(ctx, function_identifier, template, event, no_event, env_vars, debug_port,  # pylint: disable=R0914
           debug_args, debugger_path, docker_volume_basedir, docker_network, log_file, layer_cache_basedir,
           skip_pull_image, force_image_build, parameter_overrides):
    """
    Implementation of the ``cli`` method, just separated out for unit testing purposes
    """

    LOG.debug("local invoke command is called")

    if no_event and event != STDIN_FILE_NAME:
        # Do not know what the user wants. no_event and event both passed in.
        raise UserException("no_event and event cannot be used together. Please provide only one.")

    if no_event:
        event_data = "{}"
    else:
        event_data = _get_event(event)

    # Pass all inputs to set up the necessary context to invoke the function locally.
    # Handle exceptions raised by the processor for invalid args and print errors
    try:
        with InvokeContext(template_file=template,
                           function_identifier=function_identifier,
                           env_vars_file=env_vars,
                           docker_volume_basedir=docker_volume_basedir,
                           docker_network=docker_network,
                           log_file=log_file,
                           skip_pull_image=skip_pull_image,
                           debug_port=debug_port,
                           debug_args=debug_args,
                           debugger_path=debugger_path,
                           parameter_overrides=parameter_overrides,
                           layer_cache_basedir=layer_cache_basedir,
                           force_image_build=force_image_build,
                           aws_region=ctx.region) as context:

            # Invoke the function
            context.local_lambda_runner.invoke(context.function_name,
                                               event=event_data,
                                               stdout=context.stdout,
                                               stderr=context.stderr)

    except FunctionNotFound:
        raise UserException("Function {} not found in template".format(function_identifier))
    except (InvalidSamDocumentException,
            OverridesNotWellDefinedError,
            InvalidLayerReference,
            DebuggingNotSupported) as ex:
        raise UserException(str(ex))
    except DockerImagePullFailedException as ex:
        raise UserException(str(ex))
python
def do_cli(ctx, function_identifier, template, event, no_event, env_vars, debug_port,  # pylint: disable=R0914
           debug_args, debugger_path, docker_volume_basedir, docker_network, log_file, layer_cache_basedir,
           skip_pull_image, force_image_build, parameter_overrides):
    """
    Implementation of the ``cli`` method, just separated out for unit testing purposes
    """

    LOG.debug("local invoke command is called")

    if no_event and event != STDIN_FILE_NAME:
        # Do not know what the user wants. no_event and event both passed in.
        raise UserException("no_event and event cannot be used together. Please provide only one.")

    if no_event:
        event_data = "{}"
    else:
        event_data = _get_event(event)

    # Pass all inputs to set up the necessary context to invoke the function locally.
    # Handle exceptions raised by the processor for invalid args and print errors
    try:
        with InvokeContext(template_file=template,
                           function_identifier=function_identifier,
                           env_vars_file=env_vars,
                           docker_volume_basedir=docker_volume_basedir,
                           docker_network=docker_network,
                           log_file=log_file,
                           skip_pull_image=skip_pull_image,
                           debug_port=debug_port,
                           debug_args=debug_args,
                           debugger_path=debugger_path,
                           parameter_overrides=parameter_overrides,
                           layer_cache_basedir=layer_cache_basedir,
                           force_image_build=force_image_build,
                           aws_region=ctx.region) as context:

            # Invoke the function
            context.local_lambda_runner.invoke(context.function_name,
                                               event=event_data,
                                               stdout=context.stdout,
                                               stderr=context.stderr)

    except FunctionNotFound:
        raise UserException("Function {} not found in template".format(function_identifier))
    except (InvalidSamDocumentException,
            OverridesNotWellDefinedError,
            InvalidLayerReference,
            DebuggingNotSupported) as ex:
        raise UserException(str(ex))
    except DockerImagePullFailedException as ex:
        raise UserException(str(ex))
[ "def", "do_cli", "(", "ctx", ",", "function_identifier", ",", "template", ",", "event", ",", "no_event", ",", "env_vars", ",", "debug_port", ",", "# pylint: disable=R0914", "debug_args", ",", "debugger_path", ",", "docker_volume_basedir", ",", "docker_network", ",", "log_file", ",", "layer_cache_basedir", ",", "skip_pull_image", ",", "force_image_build", ",", "parameter_overrides", ")", ":", "LOG", ".", "debug", "(", "\"local invoke command is called\"", ")", "if", "no_event", "and", "event", "!=", "STDIN_FILE_NAME", ":", "# Do not know what the user wants. no_event and event both passed in.", "raise", "UserException", "(", "\"no_event and event cannot be used together. Please provide only one.\"", ")", "if", "no_event", ":", "event_data", "=", "\"{}\"", "else", ":", "event_data", "=", "_get_event", "(", "event", ")", "# Pass all inputs to setup necessary context to invoke function locally.", "# Handler exception raised by the processor for invalid args and print errors", "try", ":", "with", "InvokeContext", "(", "template_file", "=", "template", ",", "function_identifier", "=", "function_identifier", ",", "env_vars_file", "=", "env_vars", ",", "docker_volume_basedir", "=", "docker_volume_basedir", ",", "docker_network", "=", "docker_network", ",", "log_file", "=", "log_file", ",", "skip_pull_image", "=", "skip_pull_image", ",", "debug_port", "=", "debug_port", ",", "debug_args", "=", "debug_args", ",", "debugger_path", "=", "debugger_path", ",", "parameter_overrides", "=", "parameter_overrides", ",", "layer_cache_basedir", "=", "layer_cache_basedir", ",", "force_image_build", "=", "force_image_build", ",", "aws_region", "=", "ctx", ".", "region", ")", "as", "context", ":", "# Invoke the function", "context", ".", "local_lambda_runner", ".", "invoke", "(", "context", ".", "function_name", ",", "event", "=", "event_data", ",", "stdout", "=", "context", ".", "stdout", ",", "stderr", "=", "context", ".", "stderr", ")", "except", "FunctionNotFound", ":", "raise", "UserException", "(", "\"Function {} not found in template\"", ".", "format", "(", "function_identifier", ")", ")", "except", "(", "InvalidSamDocumentException", ",", "OverridesNotWellDefinedError", ",", "InvalidLayerReference", ",", "DebuggingNotSupported", ")", "as", "ex", ":", "raise", "UserException", "(", "str", "(", "ex", ")", ")", "except", "DockerImagePullFailedException", "as", "ex", ":", "raise", "UserException", "(", "str", "(", "ex", ")", ")" ]
Implementation of the ``cli`` method, just separated out for unit testing purposes
[ "Implementation", "of", "the", "cli", "method", "just", "separated", "out", "for", "unit", "testing", "purposes" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/invoke/cli.py#L59-L109
train
awslabs/aws-sam-cli
samcli/commands/local/invoke/cli.py
_get_event
def _get_event(event_file_name): """ Read the event JSON data from the given file. If no file is provided, read the event from stdin. :param string event_file_name: Path to event file, or '-' for stdin :return string: Contents of the event file or stdin """ if event_file_name == STDIN_FILE_NAME: # If event is empty, listen to stdin for event data until EOF LOG.info("Reading invoke payload from stdin (you can also pass it from file with --event)") # click.open_file knows to open stdin when filename is '-'. This is safer than manually opening streams, and # accidentally closing a standard stream with click.open_file(event_file_name, 'r') as fp: return fp.read()
python
def _get_event(event_file_name): """ Read the event JSON data from the given file. If no file is provided, read the event from stdin. :param string event_file_name: Path to event file, or '-' for stdin :return string: Contents of the event file or stdin """ if event_file_name == STDIN_FILE_NAME: # If event is empty, listen to stdin for event data until EOF LOG.info("Reading invoke payload from stdin (you can also pass it from file with --event)") # click.open_file knows to open stdin when filename is '-'. This is safer than manually opening streams, and # accidentally closing a standard stream with click.open_file(event_file_name, 'r') as fp: return fp.read()
[ "def", "_get_event", "(", "event_file_name", ")", ":", "if", "event_file_name", "==", "STDIN_FILE_NAME", ":", "# If event is empty, listen to stdin for event data until EOF", "LOG", ".", "info", "(", "\"Reading invoke payload from stdin (you can also pass it from file with --event)\"", ")", "# click.open_file knows to open stdin when filename is '-'. This is safer than manually opening streams, and", "# accidentally closing a standard stream", "with", "click", ".", "open_file", "(", "event_file_name", ",", "'r'", ")", "as", "fp", ":", "return", "fp", ".", "read", "(", ")" ]
Read the event JSON data from the given file. If no file is provided, read the event from stdin. :param string event_file_name: Path to event file, or '-' for stdin :return string: Contents of the event file or stdin
[ "Read", "the", "event", "JSON", "data", "from", "the", "given", "file", ".", "If", "no", "file", "is", "provided", "read", "the", "event", "from", "stdin", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/invoke/cli.py#L112-L127
train
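The "-" convention that _get_event relies on is handled entirely by Click. A minimal sketch of the same pattern:

    import click

    # click.open_file treats "-" as stdin and will not close the standard stream,
    # which is why one code path works for both files and piped input
    event_file_name = "event.json"   # hypothetical path; pass "-" to read from stdin
    with click.open_file(event_file_name, "r") as fp:
        payload = fp.read()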
awslabs/aws-sam-cli
samcli/lib/samlib/resource_metadata_normalizer.py
ResourceMetadataNormalizer.normalize
def normalize(template_dict): """ Normalize all Resources in the template with the Metadata Key on the resource. This method will mutate the template Parameters ---------- template_dict dict Dictionary representing the template """ resources = template_dict.get(RESOURCES_KEY, {}) for logical_id, resource in resources.items(): resource_metadata = resource.get(METADATA_KEY, {}) asset_path = resource_metadata.get(ASSET_PATH_METADATA_KEY) asset_property = resource_metadata.get(ASSET_PROPERTY_METADATA_KEY) ResourceMetadataNormalizer._replace_property(asset_property, asset_path, resource, logical_id)
python
def normalize(template_dict): """ Normalize all Resources in the template with the Metadata Key on the resource. This method will mutate the template Parameters ---------- template_dict dict Dictionary representing the template """ resources = template_dict.get(RESOURCES_KEY, {}) for logical_id, resource in resources.items(): resource_metadata = resource.get(METADATA_KEY, {}) asset_path = resource_metadata.get(ASSET_PATH_METADATA_KEY) asset_property = resource_metadata.get(ASSET_PROPERTY_METADATA_KEY) ResourceMetadataNormalizer._replace_property(asset_property, asset_path, resource, logical_id)
[ "def", "normalize", "(", "template_dict", ")", ":", "resources", "=", "template_dict", ".", "get", "(", "RESOURCES_KEY", ",", "{", "}", ")", "for", "logical_id", ",", "resource", "in", "resources", ".", "items", "(", ")", ":", "resource_metadata", "=", "resource", ".", "get", "(", "METADATA_KEY", ",", "{", "}", ")", "asset_path", "=", "resource_metadata", ".", "get", "(", "ASSET_PATH_METADATA_KEY", ")", "asset_property", "=", "resource_metadata", ".", "get", "(", "ASSET_PROPERTY_METADATA_KEY", ")", "ResourceMetadataNormalizer", ".", "_replace_property", "(", "asset_property", ",", "asset_path", ",", "resource", ",", "logical_id", ")" ]
Normalize all Resources in the template with the Metadata Key on the resource. This method will mutate the template Parameters ---------- template_dict dict Dictionary representing the template
[ "Normalize", "all", "Resources", "in", "the", "template", "with", "the", "Metadata", "Key", "on", "the", "resource", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/samlib/resource_metadata_normalizer.py#L19-L38
train
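The effect of ResourceMetadataNormalizer.normalize can be sketched with a small template. The metadata key names below ("aws:asset:path", "aws:asset:property") are inferred from the warning message in _replace_property further down and may differ from the actual constants:

    template = {
        "Resources": {
            "MyFunction": {
                "Type": "AWS::Lambda::Function",
                "Properties": {"Code": {"S3Bucket": "placeholder"}},
                "Metadata": {
                    "aws:asset:path": "/path/to/built/asset",
                    "aws:asset:property": "Code",
                },
            }
        }
    }

    ResourceMetadataNormalizer.normalize(template)
    # template["Resources"]["MyFunction"]["Properties"]["Code"]
    # is now "/path/to/built/asset"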
awslabs/aws-sam-cli
samcli/lib/samlib/resource_metadata_normalizer.py
ResourceMetadataNormalizer._replace_property
def _replace_property(property_key, property_value, resource, logical_id):
    """
    Replace a property with an asset on a given resource

    This method will mutate the template

    Parameters
    ----------
    property_key str
        The property to replace on the resource
    property_value str
        The new value of the property
    resource dict
        Dictionary representing the Resource to change
    logical_id str
        LogicalId of the Resource

    """
    if property_key and property_value:
        resource.get(PROPERTIES_KEY, {})[property_key] = property_value
    elif property_key or property_value:
        LOG.info("WARNING: Ignoring Metadata for Resource %s. Metadata contains only aws:asset:path or "
                 "aws:asset:property but not both", logical_id)
python
def _replace_property(property_key, property_value, resource, logical_id):
    """
    Replace a property with an asset on a given resource

    This method will mutate the template

    Parameters
    ----------
    property_key str
        The property to replace on the resource
    property_value str
        The new value of the property
    resource dict
        Dictionary representing the Resource to change
    logical_id str
        LogicalId of the Resource

    """
    if property_key and property_value:
        resource.get(PROPERTIES_KEY, {})[property_key] = property_value
    elif property_key or property_value:
        LOG.info("WARNING: Ignoring Metadata for Resource %s. Metadata contains only aws:asset:path or "
                 "aws:asset:property but not both", logical_id)
[ "def", "_replace_property", "(", "property_key", ",", "property_value", ",", "resource", ",", "logical_id", ")", ":", "if", "property_key", "and", "property_value", ":", "resource", ".", "get", "(", "PROPERTIES_KEY", ",", "{", "}", ")", "[", "property_key", "]", "=", "property_value", "elif", "property_key", "or", "property_value", ":", "LOG", ".", "info", "(", "\"WARNING: Ignoring Metadata for Resource %s. Metadata contains only aws:asset:path or \"", "\"aws:assert:property but not both\"", ",", "logical_id", ")" ]
Replace a property with an asset on a given resource

This method will mutate the template

Parameters
----------
property_key str
    The property to replace on the resource
property_value str
    The new value of the property
resource dict
    Dictionary representing the Resource to change
logical_id str
    LogicalId of the Resource
[ "Replace", "a", "property", "with", "an", "asset", "on", "a", "given", "resource" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/samlib/resource_metadata_normalizer.py#L41-L63
train
awslabs/aws-sam-cli
samcli/cli/command.py
BaseCommand._set_commands
def _set_commands(package_names):
    """
    Extract the command name from the package name. The last part of the module path is the command,
    i.e., if the path is foo.bar.baz, then "baz" is the command name.

    :param package_names: List of package names
    :return: Dictionary with the command name as key and the package name as value.
    """

    commands = {}

    for pkg_name in package_names:
        cmd_name = pkg_name.split('.')[-1]
        commands[cmd_name] = pkg_name

    return commands
python
def _set_commands(package_names):
    """
    Extract the command name from the package name. The last part of the module path is the command,
    i.e., if the path is foo.bar.baz, then "baz" is the command name.

    :param package_names: List of package names
    :return: Dictionary with the command name as key and the package name as value.
    """

    commands = {}

    for pkg_name in package_names:
        cmd_name = pkg_name.split('.')[-1]
        commands[cmd_name] = pkg_name

    return commands
[ "def", "_set_commands", "(", "package_names", ")", ":", "commands", "=", "{", "}", "for", "pkg_name", "in", "package_names", ":", "cmd_name", "=", "pkg_name", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "commands", "[", "cmd_name", "]", "=", "pkg_name", "return", "commands" ]
Extract the command name from the package name. The last part of the module path is the command,
i.e., if the path is foo.bar.baz, then "baz" is the command name.

:param package_names: List of package names
:return: Dictionary with the command name as key and the package name as value.
[ "Extract", "the", "command", "name", "from", "package", "name", ".", "Last", "part", "of", "the", "module", "path", "is", "the", "command", "ie", ".", "if", "path", "is", "foo", ".", "bar", ".", "baz", "then", "baz", "is", "the", "command", "name", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/command.py#L62-L77
train
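Using the docstring's own foo.bar.baz example, _set_commands behaves as follows:

    print(BaseCommand._set_commands(["foo.bar.baz", "foo.bar.qux"]))
    # {'baz': 'foo.bar.baz', 'qux': 'foo.bar.qux'}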
awslabs/aws-sam-cli
samcli/cli/command.py
BaseCommand.get_command
def get_command(self, ctx, cmd_name):
    """
    Overrides method from ``click.MultiCommand`` that returns Click CLI object for given command name, if found.

    :param ctx: Click context
    :param cmd_name: Top-level command name
    :return: Click object representing the command
    """
    if cmd_name not in self._commands:
        logger.error("Command %s not available", cmd_name)
        return

    pkg_name = self._commands[cmd_name]

    try:
        mod = importlib.import_module(pkg_name)
    except ImportError:
        logger.exception("Command '%s' is not configured correctly. Unable to import '%s'", cmd_name, pkg_name)
        return

    if not hasattr(mod, "cli"):
        logger.error("Command %s is not configured correctly. It must expose a function called 'cli'", cmd_name)
        return

    return mod.cli
python
def get_command(self, ctx, cmd_name):
    """
    Overrides method from ``click.MultiCommand`` that returns Click CLI object for given command name, if found.

    :param ctx: Click context
    :param cmd_name: Top-level command name
    :return: Click object representing the command
    """
    if cmd_name not in self._commands:
        logger.error("Command %s not available", cmd_name)
        return

    pkg_name = self._commands[cmd_name]

    try:
        mod = importlib.import_module(pkg_name)
    except ImportError:
        logger.exception("Command '%s' is not configured correctly. Unable to import '%s'", cmd_name, pkg_name)
        return

    if not hasattr(mod, "cli"):
        logger.error("Command %s is not configured correctly. It must expose a function called 'cli'", cmd_name)
        return

    return mod.cli
[ "def", "get_command", "(", "self", ",", "ctx", ",", "cmd_name", ")", ":", "if", "cmd_name", "not", "in", "self", ".", "_commands", ":", "logger", ".", "error", "(", "\"Command %s not available\"", ",", "cmd_name", ")", "return", "pkg_name", "=", "self", ".", "_commands", "[", "cmd_name", "]", "try", ":", "mod", "=", "importlib", ".", "import_module", "(", "pkg_name", ")", "except", "ImportError", ":", "logger", ".", "exception", "(", "\"Command '%s' is not configured correctly. Unable to import '%s'\"", ",", "cmd_name", ",", "pkg_name", ")", "return", "if", "not", "hasattr", "(", "mod", ",", "\"cli\"", ")", ":", "logger", ".", "error", "(", "\"Command %s is not configured correctly. It must expose an function called 'cli'\"", ",", "cmd_name", ")", "return", "return", "mod", ".", "cli" ]
Overrides method from ``click.MultiCommand`` that returns Click CLI object for given command name, if found. :param ctx: Click context :param cmd_name: Top-level command name :return: Click object representing the command
[ "Overrides", "method", "from", "click", ".", "MultiCommand", "that", "returns", "Click", "CLI", "object", "for", "given", "command", "name", "if", "found", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/cli/command.py#L88-L112
train
awslabs/aws-sam-cli
samcli/lib/utils/stream_writer.py
StreamWriter.write
def write(self, output): """ Writes specified text to the underlying stream Parameters ---------- output bytes-like object Bytes to write """ self._stream.write(output) if self._auto_flush: self._stream.flush()
python
def write(self, output): """ Writes specified text to the underlying stream Parameters ---------- output bytes-like object Bytes to write """ self._stream.write(output) if self._auto_flush: self._stream.flush()
[ "def", "write", "(", "self", ",", "output", ")", ":", "self", ".", "_stream", ".", "write", "(", "output", ")", "if", "self", ".", "_auto_flush", ":", "self", ".", "_stream", ".", "flush", "(", ")" ]
Writes specified text to the underlying stream Parameters ---------- output bytes-like object Bytes to write
[ "Writes", "specified", "text", "to", "the", "underlying", "stream" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/stream_writer.py#L22-L34
train
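A quick usage sketch for StreamWriter.write, assuming the constructor simply stores the stream and the auto-flush flag used above:

    import io

    buf = io.BytesIO()
    writer = StreamWriter(buf, auto_flush=True)   # constructor signature assumed
    writer.write(b"hello\n")
    print(buf.getvalue())   # b'hello\n' - already flushed to the underlying stream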
awslabs/aws-sam-cli
samcli/lib/build/workflow_config.py
get_workflow_config
def get_workflow_config(runtime, code_dir, project_dir): """ Get a workflow config that corresponds to the runtime provided. This method examines contents of the project and code directories to determine the most appropriate workflow for the given runtime. Currently the decision is based on the presence of a supported manifest file. For runtimes that have more than one workflow, we choose a workflow by examining ``code_dir`` followed by ``project_dir`` for presence of a supported manifest. Parameters ---------- runtime str The runtime of the config code_dir str Directory where Lambda function code is present project_dir str Root of the Serverless application project. Returns ------- namedtuple(Capability) namedtuple that represents the Builder Workflow Config """ selectors_by_runtime = { "python2.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.6": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "nodejs4.3": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs6.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs8.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "ruby2.5": BasicWorkflowSelector(RUBY_BUNDLER_CONFIG), "dotnetcore2.0": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), "dotnetcore2.1": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), # When Maven builder exists, add to this list so we can automatically choose a builder based on the supported # manifest "java8": ManifestWorkflowSelector([ # Gradle builder needs custom executable paths to find `gradlew` binary JAVA_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_KOTLIN_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_MAVEN_CONFIG ]), } if runtime not in selectors_by_runtime: raise UnsupportedRuntimeException("'{}' runtime is not supported".format(runtime)) selector = selectors_by_runtime[runtime] try: config = selector.get_config(code_dir, project_dir) return config except ValueError as ex: raise UnsupportedRuntimeException("Unable to find a supported build workflow for runtime '{}'. Reason: {}" .format(runtime, str(ex)))
python
def get_workflow_config(runtime, code_dir, project_dir): """ Get a workflow config that corresponds to the runtime provided. This method examines contents of the project and code directories to determine the most appropriate workflow for the given runtime. Currently the decision is based on the presence of a supported manifest file. For runtimes that have more than one workflow, we choose a workflow by examining ``code_dir`` followed by ``project_dir`` for presence of a supported manifest. Parameters ---------- runtime str The runtime of the config code_dir str Directory where Lambda function code is present project_dir str Root of the Serverless application project. Returns ------- namedtuple(Capability) namedtuple that represents the Builder Workflow Config """ selectors_by_runtime = { "python2.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.6": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "python3.7": BasicWorkflowSelector(PYTHON_PIP_CONFIG), "nodejs4.3": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs6.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "nodejs8.10": BasicWorkflowSelector(NODEJS_NPM_CONFIG), "ruby2.5": BasicWorkflowSelector(RUBY_BUNDLER_CONFIG), "dotnetcore2.0": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), "dotnetcore2.1": BasicWorkflowSelector(DOTNET_CLIPACKAGE_CONFIG), # When Maven builder exists, add to this list so we can automatically choose a builder based on the supported # manifest "java8": ManifestWorkflowSelector([ # Gradle builder needs custom executable paths to find `gradlew` binary JAVA_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_KOTLIN_GRADLE_CONFIG._replace(executable_search_paths=[code_dir, project_dir]), JAVA_MAVEN_CONFIG ]), } if runtime not in selectors_by_runtime: raise UnsupportedRuntimeException("'{}' runtime is not supported".format(runtime)) selector = selectors_by_runtime[runtime] try: config = selector.get_config(code_dir, project_dir) return config except ValueError as ex: raise UnsupportedRuntimeException("Unable to find a supported build workflow for runtime '{}'. Reason: {}" .format(runtime, str(ex)))
[ "def", "get_workflow_config", "(", "runtime", ",", "code_dir", ",", "project_dir", ")", ":", "selectors_by_runtime", "=", "{", "\"python2.7\"", ":", "BasicWorkflowSelector", "(", "PYTHON_PIP_CONFIG", ")", ",", "\"python3.6\"", ":", "BasicWorkflowSelector", "(", "PYTHON_PIP_CONFIG", ")", ",", "\"python3.7\"", ":", "BasicWorkflowSelector", "(", "PYTHON_PIP_CONFIG", ")", ",", "\"nodejs4.3\"", ":", "BasicWorkflowSelector", "(", "NODEJS_NPM_CONFIG", ")", ",", "\"nodejs6.10\"", ":", "BasicWorkflowSelector", "(", "NODEJS_NPM_CONFIG", ")", ",", "\"nodejs8.10\"", ":", "BasicWorkflowSelector", "(", "NODEJS_NPM_CONFIG", ")", ",", "\"ruby2.5\"", ":", "BasicWorkflowSelector", "(", "RUBY_BUNDLER_CONFIG", ")", ",", "\"dotnetcore2.0\"", ":", "BasicWorkflowSelector", "(", "DOTNET_CLIPACKAGE_CONFIG", ")", ",", "\"dotnetcore2.1\"", ":", "BasicWorkflowSelector", "(", "DOTNET_CLIPACKAGE_CONFIG", ")", ",", "# When Maven builder exists, add to this list so we can automatically choose a builder based on the supported", "# manifest", "\"java8\"", ":", "ManifestWorkflowSelector", "(", "[", "# Gradle builder needs custom executable paths to find `gradlew` binary", "JAVA_GRADLE_CONFIG", ".", "_replace", "(", "executable_search_paths", "=", "[", "code_dir", ",", "project_dir", "]", ")", ",", "JAVA_KOTLIN_GRADLE_CONFIG", ".", "_replace", "(", "executable_search_paths", "=", "[", "code_dir", ",", "project_dir", "]", ")", ",", "JAVA_MAVEN_CONFIG", "]", ")", ",", "}", "if", "runtime", "not", "in", "selectors_by_runtime", ":", "raise", "UnsupportedRuntimeException", "(", "\"'{}' runtime is not supported\"", ".", "format", "(", "runtime", ")", ")", "selector", "=", "selectors_by_runtime", "[", "runtime", "]", "try", ":", "config", "=", "selector", ".", "get_config", "(", "code_dir", ",", "project_dir", ")", "return", "config", "except", "ValueError", "as", "ex", ":", "raise", "UnsupportedRuntimeException", "(", "\"Unable to find a supported build workflow for runtime '{}'. Reason: {}\"", ".", "format", "(", "runtime", ",", "str", "(", "ex", ")", ")", ")" ]
Get a workflow config that corresponds to the runtime provided. This method examines contents of the project and code directories to determine the most appropriate workflow for the given runtime. Currently the decision is based on the presence of a supported manifest file. For runtimes that have more than one workflow, we choose a workflow by examining ``code_dir`` followed by ``project_dir`` for presence of a supported manifest. Parameters ---------- runtime str The runtime of the config code_dir str Directory where Lambda function code is present project_dir str Root of the Serverless application project. Returns ------- namedtuple(Capability) namedtuple that represents the Builder Workflow Config
[ "Get", "a", "workflow", "config", "that", "corresponds", "to", "the", "runtime", "provided", ".", "This", "method", "examines", "contents", "of", "the", "project", "and", "code", "directories", "to", "determine", "the", "most", "appropriate", "workflow", "for", "the", "given", "runtime", ".", "Currently", "the", "decision", "is", "based", "on", "the", "presence", "of", "a", "supported", "manifest", "file", ".", "For", "runtimes", "that", "have", "more", "than", "one", "workflow", "we", "choose", "a", "workflow", "by", "examining", "code_dir", "followed", "by", "project_dir", "for", "presence", "of", "a", "supported", "manifest", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/build/workflow_config.py#L70-L125
train
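Typical usage of get_workflow_config, with illustrative paths (the manifest name printed is whatever PYTHON_PIP_CONFIG declares; the exact value is not shown in this record):

    config = get_workflow_config("python3.6", code_dir="/app/src", project_dir="/app")
    print(config.manifest_name)   # the manifest declared by PYTHON_PIP_CONFIG

    # Runtimes missing from selectors_by_runtime fail fast:
    get_workflow_config("go1.x", "/app/src", "/app")
    # raises UnsupportedRuntimeException: 'go1.x' runtime is not supported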
awslabs/aws-sam-cli
samcli/lib/build/workflow_config.py
supports_build_in_container
def supports_build_in_container(config):
    """
    Given a workflow config, this method provides a boolean on whether the workflow can run within a container
    or not.

    Parameters
    ----------
    config namedtuple(Capability)
        Config specifying the particular build workflow

    Returns
    -------
    tuple(bool, str)
        True, if this workflow can be built inside a container. False, along with a reason message if it cannot
        be.
    """

    def _key(c):
        return str(c.language) + str(c.dependency_manager) + str(c.application_framework)

    # This information could have been bundled inside the Workflow Config object. But we do it this way because
    # ultimately the workflow's implementation dictates whether it can run within a container or not.
    # A "workflow config" is like a primary key to identify the workflow. So we use the config as a key in the
    # map to identify which workflows can support building within a container.
    unsupported = {
        _key(DOTNET_CLIPACKAGE_CONFIG): "We do not support building .NET Core Lambda functions within a container. "
                                        "Try building without the container. Most .NET Core functions will build "
                                        "successfully.",
    }

    thiskey = _key(config)
    if thiskey in unsupported:
        return False, unsupported[thiskey]

    return True, None
python
def supports_build_in_container(config):
    """
    Given a workflow config, this method provides a boolean on whether the workflow can run within a container
    or not.

    Parameters
    ----------
    config namedtuple(Capability)
        Config specifying the particular build workflow

    Returns
    -------
    tuple(bool, str)
        True, if this workflow can be built inside a container. False, along with a reason message if it cannot
        be.
    """

    def _key(c):
        return str(c.language) + str(c.dependency_manager) + str(c.application_framework)

    # This information could have been bundled inside the Workflow Config object. But we do it this way because
    # ultimately the workflow's implementation dictates whether it can run within a container or not.
    # A "workflow config" is like a primary key to identify the workflow. So we use the config as a key in the
    # map to identify which workflows can support building within a container.
    unsupported = {
        _key(DOTNET_CLIPACKAGE_CONFIG): "We do not support building .NET Core Lambda functions within a container. "
                                        "Try building without the container. Most .NET Core functions will build "
                                        "successfully.",
    }

    thiskey = _key(config)
    if thiskey in unsupported:
        return False, unsupported[thiskey]

    return True, None
[ "def", "supports_build_in_container", "(", "config", ")", ":", "def", "_key", "(", "c", ")", ":", "return", "str", "(", "c", ".", "language", ")", "+", "str", "(", "c", ".", "dependency_manager", ")", "+", "str", "(", "c", ".", "application_framework", ")", "# This information could have beeen bundled inside the Workflow Config object. But we this way because", "# ultimately the workflow's implementation dictates whether it can run within a container or not.", "# A \"workflow config\" is like a primary key to identify the workflow. So we use the config as a key in the", "# map to identify which workflows can support building within a container.", "unsupported", "=", "{", "_key", "(", "DOTNET_CLIPACKAGE_CONFIG", ")", ":", "\"We do not support building .NET Core Lambda functions within a container. \"", "\"Try building without the container. Most .NET Core functions will build \"", "\"successfully.\"", ",", "}", "thiskey", "=", "_key", "(", "config", ")", "if", "thiskey", "in", "unsupported", ":", "return", "False", ",", "unsupported", "[", "thiskey", "]", "return", "True", ",", "None" ]
Given a workflow config, this method provides a boolean on whether the workflow can run within a container or not. Parameters ---------- config namedtuple(Capability) Config specifying the particular build workflow Returns ------- tuple(bool, str) True, if this workflow can be built inside a container. False, along with a reason message if it cannot be.
[ "Given", "a", "workflow", "config", "this", "method", "provides", "a", "boolean", "on", "whether", "the", "workflow", "can", "run", "within", "a", "container", "or", "not", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/build/workflow_config.py#L128-L161
train
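Usage of supports_build_in_container follows directly from its tuple return value:

    supported, reason = supports_build_in_container(DOTNET_CLIPACKAGE_CONFIG)
    if not supported:
        print(reason)   # the .NET Core in-container build limitation message

    supported, reason = supports_build_in_container(PYTHON_PIP_CONFIG)
    print(supported, reason)   # True None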
awslabs/aws-sam-cli
samcli/lib/build/workflow_config.py
ManifestWorkflowSelector.get_config
def get_config(self, code_dir, project_dir):
    """
    Finds a configuration by looking for a manifest in the given directories.

    Returns
    -------
    samcli.lib.build.workflow_config.CONFIG
        A supported configuration if one is found

    Raises
    ------
    ValueError
        If none of the supported manifest files are found
    """

    # Search for the manifest first in the code directory and then in the project directory.
    # Search order is important here because we want to prefer a manifest present within the code directory over
    # a manifest present in the project directory.
    search_dirs = [code_dir, project_dir]
    LOG.debug("Looking for a supported build workflow in following directories: %s", search_dirs)

    for config in self.configs:

        if any([self._has_manifest(config, directory) for directory in search_dirs]):
            return config

    raise ValueError("None of the supported manifests '{}' were found in the following paths '{}'".format(
        [config.manifest_name for config in self.configs],
        search_dirs))
python
def get_config(self, code_dir, project_dir):
    """
    Finds a configuration by looking for a manifest in the given directories.

    Returns
    -------
    samcli.lib.build.workflow_config.CONFIG
        A supported configuration if one is found

    Raises
    ------
    ValueError
        If none of the supported manifest files are found
    """

    # Search for the manifest first in the code directory and then in the project directory.
    # Search order is important here because we want to prefer a manifest present within the code directory over
    # a manifest present in the project directory.
    search_dirs = [code_dir, project_dir]
    LOG.debug("Looking for a supported build workflow in following directories: %s", search_dirs)

    for config in self.configs:

        if any([self._has_manifest(config, directory) for directory in search_dirs]):
            return config

    raise ValueError("None of the supported manifests '{}' were found in the following paths '{}'".format(
        [config.manifest_name for config in self.configs],
        search_dirs))
[ "def", "get_config", "(", "self", ",", "code_dir", ",", "project_dir", ")", ":", "# Search for manifest first in code directory and then in the project directory.", "# Search order is important here because we want to prefer the manifest present within the code directory over", "# a manifest present in project directory.", "search_dirs", "=", "[", "code_dir", ",", "project_dir", "]", "LOG", ".", "debug", "(", "\"Looking for a supported build workflow in following directories: %s\"", ",", "search_dirs", ")", "for", "config", "in", "self", ".", "configs", ":", "if", "any", "(", "[", "self", ".", "_has_manifest", "(", "config", ",", "directory", ")", "for", "directory", "in", "search_dirs", "]", ")", ":", "return", "config", "raise", "ValueError", "(", "\"None of the supported manifests '{}' were found in the following paths '{}'\"", ".", "format", "(", "[", "config", ".", "manifest_name", "for", "config", "in", "self", ".", "configs", "]", ",", "search_dirs", ")", ")" ]
Finds a configuration by looking for a manifest in the given directories.

Returns
-------
samcli.lib.build.workflow_config.CONFIG
    A supported configuration if one is found

Raises
------
ValueError
    If none of the supported manifest files are found
[ "Finds", "a", "configuration", "by", "looking", "for", "a", "manifest", "in", "the", "given", "directories", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/build/workflow_config.py#L188-L216
train
awslabs/aws-sam-cli
samcli/yamlhelper.py
intrinsics_multi_constructor
def intrinsics_multi_constructor(loader, tag_prefix, node):
    """
    YAML constructor to parse CloudFormation intrinsics.
    This will return a dictionary with the key being the intrinsic name
    """

    # Get the actual tag name excluding the first exclamation
    tag = node.tag[1:]

    # Some intrinsic functions don't support the prefix "Fn::"
    prefix = "Fn::"
    if tag in ["Ref", "Condition"]:
        prefix = ""

    cfntag = prefix + tag

    if tag == "GetAtt" and isinstance(node.value, six.string_types):
        # ShortHand notation for !GetAtt accepts Resource.Attribute format
        # while the standard notation is to use an array
        # [Resource, Attribute]. Convert shorthand to standard format
        value = node.value.split(".", 1)

    elif isinstance(node, ScalarNode):
        # Value of this node is scalar
        value = loader.construct_scalar(node)

    elif isinstance(node, SequenceNode):
        # Value of this node is an array (Ex: [1,2])
        value = loader.construct_sequence(node)

    else:
        # Value of this node is a mapping (ex: {foo: bar})
        value = loader.construct_mapping(node)

    return {cfntag: value}
python
def intrinsics_multi_constructor(loader, tag_prefix, node):
    """
    YAML constructor to parse CloudFormation intrinsics.
    This will return a dictionary with the key being the intrinsic name
    """

    # Get the actual tag name excluding the first exclamation
    tag = node.tag[1:]

    # Some intrinsic functions don't support the prefix "Fn::"
    prefix = "Fn::"
    if tag in ["Ref", "Condition"]:
        prefix = ""

    cfntag = prefix + tag

    if tag == "GetAtt" and isinstance(node.value, six.string_types):
        # ShortHand notation for !GetAtt accepts Resource.Attribute format
        # while the standard notation is to use an array
        # [Resource, Attribute]. Convert shorthand to standard format
        value = node.value.split(".", 1)

    elif isinstance(node, ScalarNode):
        # Value of this node is scalar
        value = loader.construct_scalar(node)

    elif isinstance(node, SequenceNode):
        # Value of this node is an array (Ex: [1,2])
        value = loader.construct_sequence(node)

    else:
        # Value of this node is a mapping (ex: {foo: bar})
        value = loader.construct_mapping(node)

    return {cfntag: value}
[ "def", "intrinsics_multi_constructor", "(", "loader", ",", "tag_prefix", ",", "node", ")", ":", "# Get the actual tag name excluding the first exclamation", "tag", "=", "node", ".", "tag", "[", "1", ":", "]", "# Some intrinsic functions doesn't support prefix \"Fn::\"", "prefix", "=", "\"Fn::\"", "if", "tag", "in", "[", "\"Ref\"", ",", "\"Condition\"", "]", ":", "prefix", "=", "\"\"", "cfntag", "=", "prefix", "+", "tag", "if", "tag", "==", "\"GetAtt\"", "and", "isinstance", "(", "node", ".", "value", ",", "six", ".", "string_types", ")", ":", "# ShortHand notation for !GetAtt accepts Resource.Attribute format", "# while the standard notation is to use an array", "# [Resource, Attribute]. Convert shorthand to standard format", "value", "=", "node", ".", "value", ".", "split", "(", "\".\"", ",", "1", ")", "elif", "isinstance", "(", "node", ",", "ScalarNode", ")", ":", "# Value of this node is scalar", "value", "=", "loader", ".", "construct_scalar", "(", "node", ")", "elif", "isinstance", "(", "node", ",", "SequenceNode", ")", ":", "# Value of this node is an array (Ex: [1,2])", "value", "=", "loader", ".", "construct_sequence", "(", "node", ")", "else", ":", "# Value of this node is an mapping (ex: {foo: bar})", "value", "=", "loader", ".", "construct_mapping", "(", "node", ")", "return", "{", "cfntag", ":", "value", "}" ]
YAML constructor to parse CloudFormation intrinsics.
This will return a dictionary with the key being the intrinsic name
[ "YAML", "constructor", "to", "parse", "CloudFormation", "intrinsics", ".", "This", "will", "return", "a", "dictionary", "with", "key", "being", "the", "instrinsic", "name" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/yamlhelper.py#L12-L46
train
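The constructor's handling of the three node shapes can be seen by registering it and parsing a few CloudFormation short-form tags:

    import yaml

    yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor)

    print(yaml.safe_load("Arn: !GetAtt MyFunction.Arn"))
    # {'Arn': {'Fn::GetAtt': ['MyFunction', 'Arn']}} - shorthand split into array form

    print(yaml.safe_load("Name: !Ref MyFunction"))
    # {'Name': {'Ref': 'MyFunction'}} - no "Fn::" prefix for Ref

    print(yaml.safe_load("Parts: !Split [',', 'a,b']"))
    # {'Parts': {'Fn::Split': [',', 'a,b']}} - sequence node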
awslabs/aws-sam-cli
samcli/yamlhelper.py
yaml_parse
def yaml_parse(yamlstr): """Parse a yaml string""" try: # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. return json.loads(yamlstr) except ValueError: yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor) return yaml.safe_load(yamlstr)
python
def yaml_parse(yamlstr): """Parse a yaml string""" try: # PyYAML doesn't support json as well as it should, so if the input # is actually just json it is better to parse it with the standard # json parser. return json.loads(yamlstr) except ValueError: yaml.SafeLoader.add_multi_constructor("!", intrinsics_multi_constructor) return yaml.safe_load(yamlstr)
[ "def", "yaml_parse", "(", "yamlstr", ")", ":", "try", ":", "# PyYAML doesn't support json as well as it should, so if the input", "# is actually just json it is better to parse it with the standard", "# json parser.", "return", "json", ".", "loads", "(", "yamlstr", ")", "except", "ValueError", ":", "yaml", ".", "SafeLoader", ".", "add_multi_constructor", "(", "\"!\"", ",", "intrinsics_multi_constructor", ")", "return", "yaml", ".", "safe_load", "(", "yamlstr", ")" ]
Parse a yaml string
[ "Parse", "a", "yaml", "string" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/yamlhelper.py#L58-L67
train
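yaml_parse therefore accepts either serialization transparently:

    print(yaml_parse('{"Resources": {}}'))            # parsed by json.loads
    print(yaml_parse("Value: !Sub 'Hello ${Name}'"))  # falls through to PyYAML
    # {'Value': {'Fn::Sub': 'Hello ${Name}'}}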
awslabs/aws-sam-cli
samcli/commands/local/lib/generated_sample_events/events.py
Events.encode
def encode(self, tags, encoding, values_to_sub): """ reads the encoding type from the event-mapping.json and determines whether a value needs encoding Parameters ---------- tags: dict the values of a particular event that can be substituted within the event json encoding: string string that helps navigate to the encoding field of the json values_to_sub: dict key/value pairs that will be substituted into the json Returns ------- values_to_sub: dict the encoded (if need be) values to substitute into the json. """ for tag in tags: if tags[tag].get(encoding) != "None": if tags[tag].get(encoding) == "url": values_to_sub[tag] = self.url_encode(values_to_sub[tag]) if tags[tag].get(encoding) == "base64": values_to_sub[tag] = self.base64_utf_encode(values_to_sub[tag]) return values_to_sub
python
def encode(self, tags, encoding, values_to_sub): """ reads the encoding type from the event-mapping.json and determines whether a value needs encoding Parameters ---------- tags: dict the values of a particular event that can be substituted within the event json encoding: string string that helps navigate to the encoding field of the json values_to_sub: dict key/value pairs that will be substituted into the json Returns ------- values_to_sub: dict the encoded (if need be) values to substitute into the json. """ for tag in tags: if tags[tag].get(encoding) != "None": if tags[tag].get(encoding) == "url": values_to_sub[tag] = self.url_encode(values_to_sub[tag]) if tags[tag].get(encoding) == "base64": values_to_sub[tag] = self.base64_utf_encode(values_to_sub[tag]) return values_to_sub
[ "def", "encode", "(", "self", ",", "tags", ",", "encoding", ",", "values_to_sub", ")", ":", "for", "tag", "in", "tags", ":", "if", "tags", "[", "tag", "]", ".", "get", "(", "encoding", ")", "!=", "\"None\"", ":", "if", "tags", "[", "tag", "]", ".", "get", "(", "encoding", ")", "==", "\"url\"", ":", "values_to_sub", "[", "tag", "]", "=", "self", ".", "url_encode", "(", "values_to_sub", "[", "tag", "]", ")", "if", "tags", "[", "tag", "]", ".", "get", "(", "encoding", ")", "==", "\"base64\"", ":", "values_to_sub", "[", "tag", "]", "=", "self", ".", "base64_utf_encode", "(", "values_to_sub", "[", "tag", "]", ")", "return", "values_to_sub" ]
reads the encoding type from the event-mapping.json and determines whether a value needs encoding Parameters ---------- tags: dict the values of a particular event that can be substituted within the event json encoding: string string that helps navigate to the encoding field of the json values_to_sub: dict key/value pairs that will be substituted into the json Returns ------- values_to_sub: dict the encoded (if need be) values to substitute into the json.
[ "reads", "the", "encoding", "type", "from", "the", "event", "-", "mapping", ".", "json", "and", "determines", "whether", "a", "value", "needs", "encoding" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/generated_sample_events/events.py#L36-L62
train
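The two encoders referenced by Events.encode are not shown in this record; plausible stand-ins for url_encode and base64_utf_encode would be:

    from base64 import b64encode

    try:
        from urllib.parse import quote   # Python 3
    except ImportError:
        from urllib import quote         # Python 2

    def url_encode(value):
        return quote(value)

    def base64_utf_encode(value):
        return b64encode(value.encode("utf8")).decode("utf8")

    print(url_encode("my key.txt"))     # my%20key.txt
    print(base64_utf_encode("hello"))   # aGVsbG8=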
awslabs/aws-sam-cli
samcli/commands/local/lib/generated_sample_events/events.py
Events.generate_event
def generate_event(self, service_name, event_type, values_to_sub): """ opens the event json, substitutes the values in, and returns the customized event json Parameters ---------- service_name: string name of the top level service (S3, apigateway, etc) event_type: string name of the event underneath the service values_to_sub: dict key/value pairs to substitute into the json Returns ------- renderer.render(): string string version of the custom event json """ # set variables for easy calling tags = self.event_mapping[service_name][event_type]['tags'] values_to_sub = self.encode(tags, 'encoding', values_to_sub) # construct the path to the Events json file this_folder = os.path.dirname(os.path.abspath(__file__)) file_name = self.event_mapping[service_name][event_type]['filename'] + ".json" file_path = os.path.join(this_folder, "events", service_name, file_name) # open the file with open(file_path) as f: data = json.load(f) data = json.dumps(data, indent=2) # return the substituted file return renderer.render(data, values_to_sub)
python
def generate_event(self, service_name, event_type, values_to_sub): """ opens the event json, substitutes the values in, and returns the customized event json Parameters ---------- service_name: string name of the top level service (S3, apigateway, etc) event_type: string name of the event underneath the service values_to_sub: dict key/value pairs to substitute into the json Returns ------- renderer.render(): string string version of the custom event json """ # set variables for easy calling tags = self.event_mapping[service_name][event_type]['tags'] values_to_sub = self.encode(tags, 'encoding', values_to_sub) # construct the path to the Events json file this_folder = os.path.dirname(os.path.abspath(__file__)) file_name = self.event_mapping[service_name][event_type]['filename'] + ".json" file_path = os.path.join(this_folder, "events", service_name, file_name) # open the file with open(file_path) as f: data = json.load(f) data = json.dumps(data, indent=2) # return the substituted file return renderer.render(data, values_to_sub)
[ "def", "generate_event", "(", "self", ",", "service_name", ",", "event_type", ",", "values_to_sub", ")", ":", "# set variables for easy calling", "tags", "=", "self", ".", "event_mapping", "[", "service_name", "]", "[", "event_type", "]", "[", "'tags'", "]", "values_to_sub", "=", "self", ".", "encode", "(", "tags", ",", "'encoding'", ",", "values_to_sub", ")", "# construct the path to the Events json file", "this_folder", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", "file_name", "=", "self", ".", "event_mapping", "[", "service_name", "]", "[", "event_type", "]", "[", "'filename'", "]", "+", "\".json\"", "file_path", "=", "os", ".", "path", ".", "join", "(", "this_folder", ",", "\"events\"", ",", "service_name", ",", "file_name", ")", "# open the file", "with", "open", "(", "file_path", ")", "as", "f", ":", "data", "=", "json", ".", "load", "(", "f", ")", "data", "=", "json", ".", "dumps", "(", "data", ",", "indent", "=", "2", ")", "# return the substituted file", "return", "renderer", ".", "render", "(", "data", ",", "values_to_sub", ")" ]
opens the event json, substitutes the values in, and returns the customized event json Parameters ---------- service_name: string name of the top level service (S3, apigateway, etc) event_type: string name of the event underneath the service values_to_sub: dict key/value pairs to substitute into the json Returns ------- renderer.render(): string string version of the custom event json
[ "opens", "the", "event", "json", "substitutes", "the", "values", "in", "and", "returns", "the", "customized", "event", "json" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/generated_sample_events/events.py#L94-L129
train
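A hedged usage sketch for generate_event; the constructor and the exact service/event/tag names below are assumptions (they come from event-mapping.json, which is not shown in this record):

    events = Events()   # assuming a no-argument constructor that loads event-mapping.json
    rendered = events.generate_event(
        "s3", "put", {"bucket": "my-bucket", "key": "my key.txt"})
    print(rendered)     # the s3 "put" sample event with the values substituted in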
awslabs/aws-sam-cli
samcli/lib/utils/colors.py
Colored.underline
def underline(self, msg): """Underline the input""" return click.style(msg, underline=True) if self.colorize else msg
python
def underline(self, msg): """Underline the input""" return click.style(msg, underline=True) if self.colorize else msg
[ "def", "underline", "(", "self", ",", "msg", ")", ":", "return", "click", ".", "style", "(", "msg", ",", "underline", "=", "True", ")", "if", "self", ".", "colorize", "else", "msg" ]
Underline the input
[ "Underline", "the", "input" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/colors.py#L57-L59
train
awslabs/aws-sam-cli
samcli/lib/utils/colors.py
Colored._color
def _color(self, msg, color): """Internal helper method to add colors to input""" kwargs = {'fg': color} return click.style(msg, **kwargs) if self.colorize else msg
python
def _color(self, msg, color): """Internal helper method to add colors to input""" kwargs = {'fg': color} return click.style(msg, **kwargs) if self.colorize else msg
[ "def", "_color", "(", "self", ",", "msg", ",", "color", ")", ":", "kwargs", "=", "{", "'fg'", ":", "color", "}", "return", "click", ".", "style", "(", "msg", ",", "*", "*", "kwargs", ")", "if", "self", ".", "colorize", "else", "msg" ]
Internal helper method to add colors to input
[ "Internal", "helper", "method", "to", "add", "colors", "to", "input" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/utils/colors.py#L61-L64
train
awslabs/aws-sam-cli
samcli/lib/logs/fetcher.py
LogsFetcher.fetch
def fetch(self, log_group_name, start=None, end=None, filter_pattern=None): """ Fetch logs from all streams under the given CloudWatch Log Group and yields in the output. Optionally, caller can filter the logs using a pattern or a start/end time. Parameters ---------- log_group_name : string Name of CloudWatch Logs Group to query. start : datetime.datetime Optional start time for logs. end : datetime.datetime Optional end time for logs. filter_pattern : str Expression to filter the logs by. This is passed directly to CloudWatch, so any expression supported by CloudWatch Logs API is supported here. Yields ------ samcli.lib.logs.event.LogEvent Object containing the information from each log event returned by CloudWatch Logs """ kwargs = { "logGroupName": log_group_name, "interleaved": True } if start: kwargs["startTime"] = to_timestamp(start) if end: kwargs["endTime"] = to_timestamp(end) if filter_pattern: kwargs["filterPattern"] = filter_pattern while True: LOG.debug("Fetching logs from CloudWatch with parameters %s", kwargs) result = self.cw_client.filter_log_events(**kwargs) # Several events will be returned. Yield one at a time for event in result.get('events', []): yield LogEvent(log_group_name, event) # Keep iterating until there are no more logs left to query. next_token = result.get("nextToken", None) kwargs["nextToken"] = next_token if not next_token: break
python
def fetch(self, log_group_name, start=None, end=None, filter_pattern=None): """ Fetch logs from all streams under the given CloudWatch Log Group and yields in the output. Optionally, caller can filter the logs using a pattern or a start/end time. Parameters ---------- log_group_name : string Name of CloudWatch Logs Group to query. start : datetime.datetime Optional start time for logs. end : datetime.datetime Optional end time for logs. filter_pattern : str Expression to filter the logs by. This is passed directly to CloudWatch, so any expression supported by CloudWatch Logs API is supported here. Yields ------ samcli.lib.logs.event.LogEvent Object containing the information from each log event returned by CloudWatch Logs """ kwargs = { "logGroupName": log_group_name, "interleaved": True } if start: kwargs["startTime"] = to_timestamp(start) if end: kwargs["endTime"] = to_timestamp(end) if filter_pattern: kwargs["filterPattern"] = filter_pattern while True: LOG.debug("Fetching logs from CloudWatch with parameters %s", kwargs) result = self.cw_client.filter_log_events(**kwargs) # Several events will be returned. Yield one at a time for event in result.get('events', []): yield LogEvent(log_group_name, event) # Keep iterating until there are no more logs left to query. next_token = result.get("nextToken", None) kwargs["nextToken"] = next_token if not next_token: break
[ "def", "fetch", "(", "self", ",", "log_group_name", ",", "start", "=", "None", ",", "end", "=", "None", ",", "filter_pattern", "=", "None", ")", ":", "kwargs", "=", "{", "\"logGroupName\"", ":", "log_group_name", ",", "\"interleaved\"", ":", "True", "}", "if", "start", ":", "kwargs", "[", "\"startTime\"", "]", "=", "to_timestamp", "(", "start", ")", "if", "end", ":", "kwargs", "[", "\"endTime\"", "]", "=", "to_timestamp", "(", "end", ")", "if", "filter_pattern", ":", "kwargs", "[", "\"filterPattern\"", "]", "=", "filter_pattern", "while", "True", ":", "LOG", ".", "debug", "(", "\"Fetching logs from CloudWatch with parameters %s\"", ",", "kwargs", ")", "result", "=", "self", ".", "cw_client", ".", "filter_log_events", "(", "*", "*", "kwargs", ")", "# Several events will be returned. Yield one at a time", "for", "event", "in", "result", ".", "get", "(", "'events'", ",", "[", "]", ")", ":", "yield", "LogEvent", "(", "log_group_name", ",", "event", ")", "# Keep iterating until there are no more logs left to query.", "next_token", "=", "result", ".", "get", "(", "\"nextToken\"", ",", "None", ")", "kwargs", "[", "\"nextToken\"", "]", "=", "next_token", "if", "not", "next_token", ":", "break" ]
Fetch logs from all streams under the given CloudWatch Log Group and yields in the output. Optionally, caller can filter the logs using a pattern or a start/end time. Parameters ---------- log_group_name : string Name of CloudWatch Logs Group to query. start : datetime.datetime Optional start time for logs. end : datetime.datetime Optional end time for logs. filter_pattern : str Expression to filter the logs by. This is passed directly to CloudWatch, so any expression supported by CloudWatch Logs API is supported here. Yields ------ samcli.lib.logs.event.LogEvent Object containing the information from each log event returned by CloudWatch Logs
[ "Fetch", "logs", "from", "all", "streams", "under", "the", "given", "CloudWatch", "Log", "Group", "and", "yields", "in", "the", "output", ".", "Optionally", "caller", "can", "filter", "the", "logs", "using", "a", "pattern", "or", "a", "start", "/", "end", "time", "." ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/fetcher.py#L32-L85
train
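Driving LogsFetcher.fetch only needs a CloudWatch Logs client; the constructor wiring below is an assumption (the record only shows that self.cw_client must exist):

    import boto3

    fetcher = LogsFetcher(boto3.client("logs"))   # constructor signature assumed

    for event in fetcher.fetch("/aws/lambda/my-function", filter_pattern='"ERROR"'):
        print(event.timestamp_millis)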
awslabs/aws-sam-cli
samcli/lib/logs/fetcher.py
LogsFetcher.tail
def tail(self, log_group_name, start=None, filter_pattern=None, max_retries=1000, poll_interval=0.3):
    """
    ** This is a long blocking call **

    Fetches logs from CloudWatch logs similar to the ``fetch`` method, but instead of stopping after all logs have
    been fetched, this method continues to poll CloudWatch for new logs. So this essentially simulates the
    ``tail -f`` bash command.

    If no logs are available, then it keeps polling for ``timeout`` number of seconds before exiting. This method
    polls CloudWatch at around ~3 Calls Per Second to stay below the 5TPS limit.

    Parameters
    ----------
    log_group_name : str
        Name of CloudWatch Logs Group to query.
    start : datetime.datetime
        Optional start time for logs. Defaults to '5m ago'
    filter_pattern : str
        Expression to filter the logs by. This is passed directly to CloudWatch, so any expression supported by
        CloudWatch Logs API is supported here.
    max_retries : int
        When logs are not available, this value determines the number of times to retry fetching logs before
        giving up. This counter is reset every time new logs are available.
    poll_interval : float
        Number of fractional seconds to wait before polling again. Defaults to 300 milliseconds.
        If no new logs are available, this method will stop polling after ``max_retries * poll_interval`` seconds

    Yields
    ------
    samcli.lib.logs.event.LogEvent
        Object containing the information from each log event returned by CloudWatch Logs
    """

    # On every poll, the startTime of the API call is the timestamp of the last record observed
    latest_event_time = 0  # Start of epoch
    if start:
        latest_event_time = to_timestamp(start)

    counter = max_retries
    while counter > 0:

        LOG.debug("Tailing logs from %s starting at %s", log_group_name, str(latest_event_time))

        has_data = False
        counter -= 1
        events_itr = self.fetch(log_group_name,
                                start=to_datetime(latest_event_time),
                                filter_pattern=filter_pattern)

        # Find the timestamp of the most recent log event.
        for event in events_itr:
            has_data = True

            if event.timestamp_millis > latest_event_time:
                latest_event_time = event.timestamp_millis

            # Yield the event back so it behaves similar to ``fetch``
            yield event

        # This poll fetched logs. Reset the retry counter and set the timestamp for the next poll
        if has_data:
            counter = max_retries
            latest_event_time += 1  # one extra millisecond to fetch the next log event

        # We already fetched logs once. Sleep for some time before querying again.
        # This also helps us scoot under the TPS limit for the CloudWatch API call.
        time.sleep(poll_interval)
python
def tail(self, log_group_name, start=None, filter_pattern=None, max_retries=1000, poll_interval=0.3):
    """
    ** This is a long blocking call **

    Fetches logs from CloudWatch logs similar to the ``fetch`` method, but instead of stopping after all logs have
    been fetched, this method continues to poll CloudWatch for new logs. So this essentially simulates the
    ``tail -f`` bash command.

    If no logs are available, it keeps polling for up to ``max_retries * poll_interval`` seconds before exiting.

    This method polls CloudWatch at ~3 calls per second to stay below the 5 TPS limit.

    Parameters
    ----------
    log_group_name : str
        Name of CloudWatch Logs Group to query.
    start : datetime.datetime
        Optional start time for logs. Defaults to '5m ago'
    filter_pattern : str
        Expression to filter the logs by. This is passed directly to CloudWatch, so any expression supported by
        CloudWatch Logs API is supported here.
    max_retries : int
        When logs are not available, this value determines the number of times to retry fetching logs before
        giving up. This counter is reset every time new logs are available.
    poll_interval : float
        Number of fractional seconds to wait before polling again. Defaults to 300 milliseconds. If no new logs
        are available, this method will stop polling after ``max_retries * poll_interval`` seconds

    Yields
    ------
    samcli.lib.logs.event.LogEvent
        Object containing the information from each log event returned by CloudWatch Logs
    """

    # On every poll, startTime of the API call is the timestamp of last record observed
    latest_event_time = 0  # Start of epoch
    if start:
        latest_event_time = to_timestamp(start)

    counter = max_retries
    while counter > 0:
        LOG.debug("Tailing logs from %s starting at %s", log_group_name, str(latest_event_time))

        has_data = False
        counter -= 1
        events_itr = self.fetch(log_group_name,
                                start=to_datetime(latest_event_time),
                                filter_pattern=filter_pattern)

        # Find the timestamp of the most recent log event.
        for event in events_itr:
            has_data = True

            if event.timestamp_millis > latest_event_time:
                latest_event_time = event.timestamp_millis

            # Yield the event back so it behaves similar to ``fetch``
            yield event

        # This poll fetched logs. Reset the retry counter and set the timestamp for next poll
        if has_data:
            counter = max_retries
            latest_event_time += 1  # one extra millisecond to fetch next log event

        # We already fetched logs once. Sleep for some time before querying again.
        # This also helps us scoot under the TPS limit for CloudWatch API call.
        time.sleep(poll_interval)
[ "def", "tail", "(", "self", ",", "log_group_name", ",", "start", "=", "None", ",", "filter_pattern", "=", "None", ",", "max_retries", "=", "1000", ",", "poll_interval", "=", "0.3", ")", ":", "# On every poll, startTime of the API call is the timestamp of last record observed", "latest_event_time", "=", "0", "# Start of epoch", "if", "start", ":", "latest_event_time", "=", "to_timestamp", "(", "start", ")", "counter", "=", "max_retries", "while", "counter", ">", "0", ":", "LOG", ".", "debug", "(", "\"Tailing logs from %s starting at %s\"", ",", "log_group_name", ",", "str", "(", "latest_event_time", ")", ")", "has_data", "=", "False", "counter", "-=", "1", "events_itr", "=", "self", ".", "fetch", "(", "log_group_name", ",", "start", "=", "to_datetime", "(", "latest_event_time", ")", ",", "filter_pattern", "=", "filter_pattern", ")", "# Find the timestamp of the most recent log event.", "for", "event", "in", "events_itr", ":", "has_data", "=", "True", "if", "event", ".", "timestamp_millis", ">", "latest_event_time", ":", "latest_event_time", "=", "event", ".", "timestamp_millis", "# Yield the event back so it behaves similar to ``fetch``", "yield", "event", "# This poll fetched logs. Reset the retry counter and set the timestamp for next poll", "if", "has_data", ":", "counter", "=", "max_retries", "latest_event_time", "+=", "1", "# one extra millisecond to fetch next log event", "# We already fetched logs once. Sleep for some time before querying again.", "# This also helps us scoot under the TPS limit for CloudWatch API call.", "time", ".", "sleep", "(", "poll_interval", ")" ]
** This is a long blocking call **

Fetches logs from CloudWatch logs similar to the ``fetch`` method, but instead of stopping after all logs have
been fetched, this method continues to poll CloudWatch for new logs. So this essentially simulates the
``tail -f`` bash command.

If no logs are available, it keeps polling for up to ``max_retries * poll_interval`` seconds before exiting.

This method polls CloudWatch at ~3 calls per second to stay below the 5 TPS limit.

Parameters
----------
log_group_name : str
    Name of CloudWatch Logs Group to query.
start : datetime.datetime
    Optional start time for logs. Defaults to '5m ago'
filter_pattern : str
    Expression to filter the logs by. This is passed directly to CloudWatch, so any expression supported by
    CloudWatch Logs API is supported here.
max_retries : int
    When logs are not available, this value determines the number of times to retry fetching logs before giving
    up. This counter is reset every time new logs are available.
poll_interval : float
    Number of fractional seconds to wait before polling again. Defaults to 300 milliseconds. If no new logs are
    available, this method will stop polling after ``max_retries * poll_interval`` seconds

Yields
------
samcli.lib.logs.event.LogEvent
    Object containing the information from each log event returned by CloudWatch Logs
[ "**", "This", "is", "a", "long", "blocking", "call", "**" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/lib/logs/fetcher.py#L87-L157
train
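To make the polling loop in ``tail`` concrete, here is a hypothetical usage sketch. The ``LogsFetcher`` constructor argument and the ``message`` attribute on the yielded ``LogEvent`` are assumptions for illustration, not verified against the project.

# Hypothetical usage sketch; the constructor signature and the LogEvent
# `message` attribute are assumed, not confirmed from the source.
import boto3
from datetime import datetime, timedelta

from samcli.lib.logs.fetcher import LogsFetcher

fetcher = LogsFetcher(boto3.client("logs"))  # assumed constructor
start = datetime.utcnow() - timedelta(minutes=5)

# Blocks while polling; with these settings it gives up after roughly
# max_retries * poll_interval = 10 * 0.5 = 5 seconds without new events.
for event in fetcher.tail("/aws/lambda/my-function",
                          start=start,
                          filter_pattern="ERROR",
                          max_retries=10,
                          poll_interval=0.5):
    print(event.message)  # attribute name assumed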
awslabs/aws-sam-cli
samcli/commands/local/lib/provider.py
LayerVersion._compute_layer_version
def _compute_layer_version(is_defined_within_template, arn):
    """
    Parses out the Layer version from the ARN

    Parameters
    ----------
    is_defined_within_template : bool
        True if the resource is a Ref to a resource, otherwise False
    arn : str
        ARN of the Resource

    Returns
    -------
    int
        The version of the LayerVersion, or None if the layer is defined within the template
    """
    if is_defined_within_template:
        return None

    try:
        _, layer_version = arn.rsplit(':', 1)
        layer_version = int(layer_version)
    except ValueError:
        raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.")

    return layer_version
python
def _compute_layer_version(is_defined_within_template, arn):
    """
    Parses out the Layer version from the ARN

    Parameters
    ----------
    is_defined_within_template : bool
        True if the resource is a Ref to a resource, otherwise False
    arn : str
        ARN of the Resource

    Returns
    -------
    int
        The version of the LayerVersion, or None if the layer is defined within the template
    """
    if is_defined_within_template:
        return None

    try:
        _, layer_version = arn.rsplit(':', 1)
        layer_version = int(layer_version)
    except ValueError:
        raise InvalidLayerVersionArn(arn + " is an Invalid Layer Arn.")

    return layer_version
[ "def", "_compute_layer_version", "(", "is_defined_within_template", ",", "arn", ")", ":", "if", "is_defined_within_template", ":", "return", "None", "try", ":", "_", ",", "layer_version", "=", "arn", ".", "rsplit", "(", "':'", ",", "1", ")", "layer_version", "=", "int", "(", "layer_version", ")", "except", "ValueError", ":", "raise", "InvalidLayerVersionArn", "(", "arn", "+", "\" is an Invalid Layer Arn.\"", ")", "return", "layer_version" ]
Parses out the Layer version from the ARN

Parameters
----------
is_defined_within_template : bool
    True if the resource is a Ref to a resource, otherwise False
arn : str
    ARN of the Resource

Returns
-------
int
    The version of the LayerVersion, or None if the layer is defined within the template
[ "Parses", "out", "the", "Layer", "version", "from", "the", "arn" ]
c05af5e7378c6f05f7d82ad3f0bca17204177db6
https://github.com/awslabs/aws-sam-cli/blob/c05af5e7378c6f05f7d82ad3f0bca17204177db6/samcli/commands/local/lib/provider.py#L72-L99
train
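The parsing in ``_compute_layer_version`` hinges on the version being the last colon-separated field of a Lambda layer ARN. A standalone sketch, using a made-up ARN:

# Standalone illustration of the rsplit-based parsing; the ARN is a made-up
# example following the documented Lambda layer ARN shape.
arn = "arn:aws:lambda:us-east-1:123456789012:layer:my-layer:4"
_, version = arn.rsplit(":", 1)
print(int(version))  # -> 4

# An ARN whose last field is not numeric raises ValueError, which the real
# method converts into InvalidLayerVersionArn.
try:
    int("arn:aws:lambda:us-east-1:123456789012:layer:my-layer".rsplit(":", 1)[1])
except ValueError:
    print("not a versioned layer ARN")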