repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
python-hyper/h11
h11/_connection.py
Connection.next_event
def next_event(self): """Parse the next event out of our receive buffer, update our internal state, and return it. This is a mutating operation -- think of it like calling :func:`next` on an iterator. Returns: : One of three things: 1) An event object -- see :ref:`events`. 2) The special constant :data:`NEED_DATA`, which indicates that you need to read more data from your socket and pass it to :meth:`receive_data` before this method will be able to return any more events. 3) The special constant :data:`PAUSED`, which indicates that we are not in a state where we can process incoming data (usually because the peer has finished their part of the current request/response cycle, and you have not yet called :meth:`start_next_cycle`). See :ref:`flow-control` for details. Raises: RemoteProtocolError: The peer has misbehaved. You should close the connection (possibly after sending some kind of 4xx response). Once this method returns :class:`ConnectionClosed` once, then all subsequent calls will also return :class:`ConnectionClosed`. If this method raises any exception besides :exc:`RemoteProtocolError` then that's a bug -- if it happens please file a bug report! If this method raises any exception then it also sets :attr:`Connection.their_state` to :data:`ERROR` -- see :ref:`error-handling` for discussion. 
""" if self.their_state is ERROR: raise RemoteProtocolError( "Can't receive data when peer state is ERROR") try: event = self._extract_next_receive_event() if event not in [NEED_DATA, PAUSED]: self._process_event(self.their_role, event) self._receive_buffer.compress() if event is NEED_DATA: if len(self._receive_buffer) > self._max_incomplete_event_size: # 431 is "Request header fields too large" which is pretty # much the only situation where we can get here raise RemoteProtocolError("Receive buffer too long", error_status_hint=431) if self._receive_buffer_closed: # We're still trying to complete some event, but that's # never going to happen because no more data is coming raise RemoteProtocolError( "peer unexpectedly closed connection") return event except BaseException as exc: self._process_error(self.their_role) if isinstance(exc, LocalProtocolError): exc._reraise_as_remote_protocol_error() else: raise
python
def next_event(self): """Parse the next event out of our receive buffer, update our internal state, and return it. This is a mutating operation -- think of it like calling :func:`next` on an iterator. Returns: : One of three things: 1) An event object -- see :ref:`events`. 2) The special constant :data:`NEED_DATA`, which indicates that you need to read more data from your socket and pass it to :meth:`receive_data` before this method will be able to return any more events. 3) The special constant :data:`PAUSED`, which indicates that we are not in a state where we can process incoming data (usually because the peer has finished their part of the current request/response cycle, and you have not yet called :meth:`start_next_cycle`). See :ref:`flow-control` for details. Raises: RemoteProtocolError: The peer has misbehaved. You should close the connection (possibly after sending some kind of 4xx response). Once this method returns :class:`ConnectionClosed` once, then all subsequent calls will also return :class:`ConnectionClosed`. If this method raises any exception besides :exc:`RemoteProtocolError` then that's a bug -- if it happens please file a bug report! If this method raises any exception then it also sets :attr:`Connection.their_state` to :data:`ERROR` -- see :ref:`error-handling` for discussion. 
""" if self.their_state is ERROR: raise RemoteProtocolError( "Can't receive data when peer state is ERROR") try: event = self._extract_next_receive_event() if event not in [NEED_DATA, PAUSED]: self._process_event(self.their_role, event) self._receive_buffer.compress() if event is NEED_DATA: if len(self._receive_buffer) > self._max_incomplete_event_size: # 431 is "Request header fields too large" which is pretty # much the only situation where we can get here raise RemoteProtocolError("Receive buffer too long", error_status_hint=431) if self._receive_buffer_closed: # We're still trying to complete some event, but that's # never going to happen because no more data is coming raise RemoteProtocolError( "peer unexpectedly closed connection") return event except BaseException as exc: self._process_error(self.their_role) if isinstance(exc, LocalProtocolError): exc._reraise_as_remote_protocol_error() else: raise
[ "def", "next_event", "(", "self", ")", ":", "if", "self", ".", "their_state", "is", "ERROR", ":", "raise", "RemoteProtocolError", "(", "\"Can't receive data when peer state is ERROR\"", ")", "try", ":", "event", "=", "self", ".", "_extract_next_receive_event", "(", ...
Parse the next event out of our receive buffer, update our internal state, and return it. This is a mutating operation -- think of it like calling :func:`next` on an iterator. Returns: : One of three things: 1) An event object -- see :ref:`events`. 2) The special constant :data:`NEED_DATA`, which indicates that you need to read more data from your socket and pass it to :meth:`receive_data` before this method will be able to return any more events. 3) The special constant :data:`PAUSED`, which indicates that we are not in a state where we can process incoming data (usually because the peer has finished their part of the current request/response cycle, and you have not yet called :meth:`start_next_cycle`). See :ref:`flow-control` for details. Raises: RemoteProtocolError: The peer has misbehaved. You should close the connection (possibly after sending some kind of 4xx response). Once this method returns :class:`ConnectionClosed` once, then all subsequent calls will also return :class:`ConnectionClosed`. If this method raises any exception besides :exc:`RemoteProtocolError` then that's a bug -- if it happens please file a bug report! If this method raises any exception then it also sets :attr:`Connection.their_state` to :data:`ERROR` -- see :ref:`error-handling` for discussion.
[ "Parse", "the", "next", "event", "out", "of", "our", "receive", "buffer", "update", "our", "internal", "state", "and", "return", "it", "." ]
836d95d1c2af2f9153c86dbc8d9784341d73c6a6
https://github.com/python-hyper/h11/blob/836d95d1c2af2f9153c86dbc8d9784341d73c6a6/h11/_connection.py#L376-L441
train
212,300
python-hyper/h11
h11/_connection.py
Connection.send
def send(self, event): """Convert a high-level event into bytes that can be sent to the peer, while updating our internal state machine. Args: event: The :ref:`event <events>` to send. Returns: If ``type(event) is ConnectionClosed``, then returns ``None``. Otherwise, returns a :term:`bytes-like object`. Raises: LocalProtocolError: Sending this event at this time would violate our understanding of the HTTP/1.1 protocol. If this method raises any exception then it also sets :attr:`Connection.our_state` to :data:`ERROR` -- see :ref:`error-handling` for discussion. """ data_list = self.send_with_data_passthrough(event) if data_list is None: return None else: return b"".join(data_list)
python
def send(self, event): """Convert a high-level event into bytes that can be sent to the peer, while updating our internal state machine. Args: event: The :ref:`event <events>` to send. Returns: If ``type(event) is ConnectionClosed``, then returns ``None``. Otherwise, returns a :term:`bytes-like object`. Raises: LocalProtocolError: Sending this event at this time would violate our understanding of the HTTP/1.1 protocol. If this method raises any exception then it also sets :attr:`Connection.our_state` to :data:`ERROR` -- see :ref:`error-handling` for discussion. """ data_list = self.send_with_data_passthrough(event) if data_list is None: return None else: return b"".join(data_list)
[ "def", "send", "(", "self", ",", "event", ")", ":", "data_list", "=", "self", ".", "send_with_data_passthrough", "(", "event", ")", "if", "data_list", "is", "None", ":", "return", "None", "else", ":", "return", "b\"\"", ".", "join", "(", "data_list", ")"...
Convert a high-level event into bytes that can be sent to the peer, while updating our internal state machine. Args: event: The :ref:`event <events>` to send. Returns: If ``type(event) is ConnectionClosed``, then returns ``None``. Otherwise, returns a :term:`bytes-like object`. Raises: LocalProtocolError: Sending this event at this time would violate our understanding of the HTTP/1.1 protocol. If this method raises any exception then it also sets :attr:`Connection.our_state` to :data:`ERROR` -- see :ref:`error-handling` for discussion.
[ "Convert", "a", "high", "-", "level", "event", "into", "bytes", "that", "can", "be", "sent", "to", "the", "peer", "while", "updating", "our", "internal", "state", "machine", "." ]
836d95d1c2af2f9153c86dbc8d9784341d73c6a6
https://github.com/python-hyper/h11/blob/836d95d1c2af2f9153c86dbc8d9784341d73c6a6/h11/_connection.py#L443-L468
train
212,301
drj11/pypng
code/png.py
adam7_generate
def adam7_generate(width, height): """ Generate the coordinates for the reduced scanlines of an Adam7 interlaced image of size `width` by `height` pixels. Yields a generator for each pass, and each pass generator yields a series of (x, y, xstep) triples, each one identifying a reduced scanline consisting of pixels starting at (x, y) and taking every xstep pixel to the right. """ for xstart, ystart, xstep, ystep in adam7: if xstart >= width: continue yield ((xstart, y, xstep) for y in range(ystart, height, ystep))
python
def adam7_generate(width, height): """ Generate the coordinates for the reduced scanlines of an Adam7 interlaced image of size `width` by `height` pixels. Yields a generator for each pass, and each pass generator yields a series of (x, y, xstep) triples, each one identifying a reduced scanline consisting of pixels starting at (x, y) and taking every xstep pixel to the right. """ for xstart, ystart, xstep, ystep in adam7: if xstart >= width: continue yield ((xstart, y, xstep) for y in range(ystart, height, ystep))
[ "def", "adam7_generate", "(", "width", ",", "height", ")", ":", "for", "xstart", ",", "ystart", ",", "xstep", ",", "ystep", "in", "adam7", ":", "if", "xstart", ">=", "width", ":", "continue", "yield", "(", "(", "xstart", ",", "y", ",", "xstep", ")", ...
Generate the coordinates for the reduced scanlines of an Adam7 interlaced image of size `width` by `height` pixels. Yields a generator for each pass, and each pass generator yields a series of (x, y, xstep) triples, each one identifying a reduced scanline consisting of pixels starting at (x, y) and taking every xstep pixel to the right.
[ "Generate", "the", "coordinates", "for", "the", "reduced", "scanlines", "of", "an", "Adam7", "interlaced", "image", "of", "size", "width", "by", "height", "pixels", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L209-L224
train
212,302
drj11/pypng
code/png.py
write_chunk
def write_chunk(outfile, tag, data=b''): """ Write a PNG chunk to the output file, including length and checksum. """ data = bytes(data) # http://www.w3.org/TR/PNG/#5Chunk-layout outfile.write(struct.pack("!I", len(data))) outfile.write(tag) outfile.write(data) checksum = zlib.crc32(tag) checksum = zlib.crc32(data, checksum) checksum &= 2 ** 32 - 1 outfile.write(struct.pack("!I", checksum))
python
def write_chunk(outfile, tag, data=b''): """ Write a PNG chunk to the output file, including length and checksum. """ data = bytes(data) # http://www.w3.org/TR/PNG/#5Chunk-layout outfile.write(struct.pack("!I", len(data))) outfile.write(tag) outfile.write(data) checksum = zlib.crc32(tag) checksum = zlib.crc32(data, checksum) checksum &= 2 ** 32 - 1 outfile.write(struct.pack("!I", checksum))
[ "def", "write_chunk", "(", "outfile", ",", "tag", ",", "data", "=", "b''", ")", ":", "data", "=", "bytes", "(", "data", ")", "# http://www.w3.org/TR/PNG/#5Chunk-layout", "outfile", ".", "write", "(", "struct", ".", "pack", "(", "\"!I\"", ",", "len", "(", ...
Write a PNG chunk to the output file, including length and checksum.
[ "Write", "a", "PNG", "chunk", "to", "the", "output", "file", "including", "length", "and", "checksum", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L896-L910
train
212,303
drj11/pypng
code/png.py
pack_rows
def pack_rows(rows, bitdepth): """Yield packed rows that are a byte array. Each byte is packed with the values from several pixels. """ assert bitdepth < 8 assert 8 % bitdepth == 0 # samples per byte spb = int(8 / bitdepth) def make_byte(block): """Take a block of (2, 4, or 8) values, and pack them into a single byte. """ res = 0 for v in block: res = (res << bitdepth) + v return res for row in rows: a = bytearray(row) # Adding padding bytes so we can group into a whole # number of spb-tuples. n = float(len(a)) extra = math.ceil(n / spb) * spb - n a.extend([0] * int(extra)) # Pack into bytes. # Each block is the samples for one byte. blocks = group(a, spb) yield bytearray(make_byte(block) for block in blocks)
python
def pack_rows(rows, bitdepth): """Yield packed rows that are a byte array. Each byte is packed with the values from several pixels. """ assert bitdepth < 8 assert 8 % bitdepth == 0 # samples per byte spb = int(8 / bitdepth) def make_byte(block): """Take a block of (2, 4, or 8) values, and pack them into a single byte. """ res = 0 for v in block: res = (res << bitdepth) + v return res for row in rows: a = bytearray(row) # Adding padding bytes so we can group into a whole # number of spb-tuples. n = float(len(a)) extra = math.ceil(n / spb) * spb - n a.extend([0] * int(extra)) # Pack into bytes. # Each block is the samples for one byte. blocks = group(a, spb) yield bytearray(make_byte(block) for block in blocks)
[ "def", "pack_rows", "(", "rows", ",", "bitdepth", ")", ":", "assert", "bitdepth", "<", "8", "assert", "8", "%", "bitdepth", "==", "0", "# samples per byte", "spb", "=", "int", "(", "8", "/", "bitdepth", ")", "def", "make_byte", "(", "block", ")", ":", ...
Yield packed rows that are a byte array. Each byte is packed with the values from several pixels.
[ "Yield", "packed", "rows", "that", "are", "a", "byte", "array", ".", "Each", "byte", "is", "packed", "with", "the", "values", "from", "several", "pixels", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L954-L985
train
212,304
drj11/pypng
code/png.py
unpack_rows
def unpack_rows(rows): """Unpack each row from being 16-bits per value, to being a sequence of bytes. """ for row in rows: fmt = '!%dH' % len(row) yield bytearray(struct.pack(fmt, *row))
python
def unpack_rows(rows): """Unpack each row from being 16-bits per value, to being a sequence of bytes. """ for row in rows: fmt = '!%dH' % len(row) yield bytearray(struct.pack(fmt, *row))
[ "def", "unpack_rows", "(", "rows", ")", ":", "for", "row", "in", "rows", ":", "fmt", "=", "'!%dH'", "%", "len", "(", "row", ")", "yield", "bytearray", "(", "struct", ".", "pack", "(", "fmt", ",", "*", "row", ")", ")" ]
Unpack each row from being 16-bits per value, to being a sequence of bytes.
[ "Unpack", "each", "row", "from", "being", "16", "-", "bits", "per", "value", "to", "being", "a", "sequence", "of", "bytes", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L988-L994
train
212,305
drj11/pypng
code/png.py
is_natural
def is_natural(x): """A non-negative integer.""" try: is_integer = int(x) == x except (TypeError, ValueError): return False return is_integer and x >= 0
python
def is_natural(x): """A non-negative integer.""" try: is_integer = int(x) == x except (TypeError, ValueError): return False return is_integer and x >= 0
[ "def", "is_natural", "(", "x", ")", ":", "try", ":", "is_integer", "=", "int", "(", "x", ")", "==", "x", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "False", "return", "is_integer", "and", "x", ">=", "0" ]
A non-negative integer.
[ "A", "non", "-", "negative", "integer", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L2203-L2209
train
212,306
drj11/pypng
code/png.py
binary_stdout
def binary_stdout(): """ A sys.stdout that accepts bytes. """ # First there is a Python3 issue. try: stdout = sys.stdout.buffer except AttributeError: # Probably Python 2, where bytes are strings. stdout = sys.stdout # On Windows the C runtime file orientation needs changing. if sys.platform == "win32": import msvcrt import os msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) return stdout
python
def binary_stdout(): """ A sys.stdout that accepts bytes. """ # First there is a Python3 issue. try: stdout = sys.stdout.buffer except AttributeError: # Probably Python 2, where bytes are strings. stdout = sys.stdout # On Windows the C runtime file orientation needs changing. if sys.platform == "win32": import msvcrt import os msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) return stdout
[ "def", "binary_stdout", "(", ")", ":", "# First there is a Python3 issue.", "try", ":", "stdout", "=", "sys", ".", "stdout", ".", "buffer", "except", "AttributeError", ":", "# Probably Python 2, where bytes are strings.", "stdout", "=", "sys", ".", "stdout", "# On Win...
A sys.stdout that accepts bytes.
[ "A", "sys", ".", "stdout", "that", "accepts", "bytes", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L2317-L2335
train
212,307
drj11/pypng
code/png.py
Writer.write_packed
def write_packed(self, outfile, rows): """ Write PNG file to `outfile`. `rows` should be an iterator that yields each packed row; a packed row being a sequence of packed bytes. The rows have a filter byte prefixed and are then compressed into one or more IDAT chunks. They are not processed any further, so if bitdepth is other than 1, 2, 4, 8, 16, the pixel values should have been scaled before passing them to this method. This method does work for interlaced images but it is best avoided. For interlaced images, the rows should be presented in the order that they appear in the file. """ self.write_preamble(outfile) # http://www.w3.org/TR/PNG/#11IDAT if self.compression is not None: compressor = zlib.compressobj(self.compression) else: compressor = zlib.compressobj() # data accumulates bytes to be compressed for the IDAT chunk; # it's compressed when sufficiently large. data = bytearray() for i, row in enumerate(rows): # Add "None" filter type. # Currently, it's essential that this filter type be used # for every scanline as # we do not mark the first row of a reduced pass image; # that means we could accidentally compute # the wrong filtered scanline if we used # "up", "average", or "paeth" on such a line. data.append(0) data.extend(row) if len(data) > self.chunk_limit: # :todo: bytes() only necessary in Python 2 compressed = compressor.compress(bytes(data)) if len(compressed): write_chunk(outfile, b'IDAT', compressed) data = bytearray() compressed = compressor.compress(bytes(data)) flushed = compressor.flush() if len(compressed) or len(flushed): write_chunk(outfile, b'IDAT', compressed + flushed) # http://www.w3.org/TR/PNG/#11IEND write_chunk(outfile, b'IEND') return i + 1
python
def write_packed(self, outfile, rows): """ Write PNG file to `outfile`. `rows` should be an iterator that yields each packed row; a packed row being a sequence of packed bytes. The rows have a filter byte prefixed and are then compressed into one or more IDAT chunks. They are not processed any further, so if bitdepth is other than 1, 2, 4, 8, 16, the pixel values should have been scaled before passing them to this method. This method does work for interlaced images but it is best avoided. For interlaced images, the rows should be presented in the order that they appear in the file. """ self.write_preamble(outfile) # http://www.w3.org/TR/PNG/#11IDAT if self.compression is not None: compressor = zlib.compressobj(self.compression) else: compressor = zlib.compressobj() # data accumulates bytes to be compressed for the IDAT chunk; # it's compressed when sufficiently large. data = bytearray() for i, row in enumerate(rows): # Add "None" filter type. # Currently, it's essential that this filter type be used # for every scanline as # we do not mark the first row of a reduced pass image; # that means we could accidentally compute # the wrong filtered scanline if we used # "up", "average", or "paeth" on such a line. data.append(0) data.extend(row) if len(data) > self.chunk_limit: # :todo: bytes() only necessary in Python 2 compressed = compressor.compress(bytes(data)) if len(compressed): write_chunk(outfile, b'IDAT', compressed) data = bytearray() compressed = compressor.compress(bytes(data)) flushed = compressor.flush() if len(compressed) or len(flushed): write_chunk(outfile, b'IDAT', compressed + flushed) # http://www.w3.org/TR/PNG/#11IEND write_chunk(outfile, b'IEND') return i + 1
[ "def", "write_packed", "(", "self", ",", "outfile", ",", "rows", ")", ":", "self", ".", "write_preamble", "(", "outfile", ")", "# http://www.w3.org/TR/PNG/#11IDAT", "if", "self", ".", "compression", "is", "not", "None", ":", "compressor", "=", "zlib", ".", "...
Write PNG file to `outfile`. `rows` should be an iterator that yields each packed row; a packed row being a sequence of packed bytes. The rows have a filter byte prefixed and are then compressed into one or more IDAT chunks. They are not processed any further, so if bitdepth is other than 1, 2, 4, 8, 16, the pixel values should have been scaled before passing them to this method. This method does work for interlaced images but it is best avoided. For interlaced images, the rows should be presented in the order that they appear in the file.
[ "Write", "PNG", "file", "to", "outfile", ".", "rows", "should", "be", "an", "iterator", "that", "yields", "each", "packed", "row", ";", "a", "packed", "row", "being", "a", "sequence", "of", "packed", "bytes", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L706-L759
train
212,308
drj11/pypng
code/png.py
Writer.array_scanlines_interlace
def array_scanlines_interlace(self, pixels): """ Generator for interlaced scanlines from an array. `pixels` is the full source image as a single array of values. The generator yields each scanline of the reduced passes in turn, each scanline being a sequence of values. """ # http://www.w3.org/TR/PNG/#8InterlaceMethods # Array type. fmt = 'BH'[self.bitdepth > 8] # Value per row vpr = self.width * self.planes # Each iteration generates a scanline starting at (x, y) # and consisting of every xstep pixels. for lines in adam7_generate(self.width, self.height): for x, y, xstep in lines: # Pixels per row (of reduced image) ppr = int(math.ceil((self.width - x) / float(xstep))) # Values per row (of reduced image) reduced_row_len = ppr * self.planes if xstep == 1: # Easy case: line is a simple slice. offset = y * vpr yield pixels[offset: offset + vpr] continue # We have to step by xstep, # which we can do one plane at a time # using the step in Python slices. row = array(fmt) # There's no easier way to set the length of an array row.extend(pixels[0:reduced_row_len]) offset = y * vpr + x * self.planes end_offset = (y + 1) * vpr skip = self.planes * xstep for i in range(self.planes): row[i::self.planes] = \ pixels[offset + i: end_offset: skip] yield row
python
def array_scanlines_interlace(self, pixels): """ Generator for interlaced scanlines from an array. `pixels` is the full source image as a single array of values. The generator yields each scanline of the reduced passes in turn, each scanline being a sequence of values. """ # http://www.w3.org/TR/PNG/#8InterlaceMethods # Array type. fmt = 'BH'[self.bitdepth > 8] # Value per row vpr = self.width * self.planes # Each iteration generates a scanline starting at (x, y) # and consisting of every xstep pixels. for lines in adam7_generate(self.width, self.height): for x, y, xstep in lines: # Pixels per row (of reduced image) ppr = int(math.ceil((self.width - x) / float(xstep))) # Values per row (of reduced image) reduced_row_len = ppr * self.planes if xstep == 1: # Easy case: line is a simple slice. offset = y * vpr yield pixels[offset: offset + vpr] continue # We have to step by xstep, # which we can do one plane at a time # using the step in Python slices. row = array(fmt) # There's no easier way to set the length of an array row.extend(pixels[0:reduced_row_len]) offset = y * vpr + x * self.planes end_offset = (y + 1) * vpr skip = self.planes * xstep for i in range(self.planes): row[i::self.planes] = \ pixels[offset + i: end_offset: skip] yield row
[ "def", "array_scanlines_interlace", "(", "self", ",", "pixels", ")", ":", "# http://www.w3.org/TR/PNG/#8InterlaceMethods", "# Array type.", "fmt", "=", "'BH'", "[", "self", ".", "bitdepth", ">", "8", "]", "# Value per row", "vpr", "=", "self", ".", "width", "*", ...
Generator for interlaced scanlines from an array. `pixels` is the full source image as a single array of values. The generator yields each scanline of the reduced passes in turn, each scanline being a sequence of values.
[ "Generator", "for", "interlaced", "scanlines", "from", "an", "array", ".", "pixels", "is", "the", "full", "source", "image", "as", "a", "single", "array", "of", "values", ".", "The", "generator", "yields", "each", "scanline", "of", "the", "reduced", "passes"...
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L854-L893
train
212,309
drj11/pypng
code/png.py
Image.write
def write(self, file): """Write the image to the open file object. See `.save()` if you have a filename. In general, you can only call this method once; after it has been called the first time the PNG image is written, the source data will have been streamed, and cannot be streamed again. """ w = Writer(**self.info) w.write(file, self.rows)
python
def write(self, file): """Write the image to the open file object. See `.save()` if you have a filename. In general, you can only call this method once; after it has been called the first time the PNG image is written, the source data will have been streamed, and cannot be streamed again. """ w = Writer(**self.info) w.write(file, self.rows)
[ "def", "write", "(", "self", ",", "file", ")", ":", "w", "=", "Writer", "(", "*", "*", "self", ".", "info", ")", "w", ".", "write", "(", "file", ",", "self", ".", "rows", ")" ]
Write the image to the open file object. See `.save()` if you have a filename. In general, you can only call this method once; after it has been called the first time the PNG image is written, the source data will have been streamed, and cannot be streamed again.
[ "Write", "the", "image", "to", "the", "open", "file", "object", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L1285-L1297
train
212,310
drj11/pypng
code/png.py
Reader._deinterlace
def _deinterlace(self, raw): """ Read raw pixel data, undo filters, deinterlace, and flatten. Return a single array of values. """ # Values per row (of the target image) vpr = self.width * self.planes # Values per image vpi = vpr * self.height # Interleaving writes to the output array randomly # (well, not quite), so the entire output array must be in memory. # Make a result array, and make it big enough. if self.bitdepth > 8: a = array('H', [0] * vpi) else: a = bytearray([0] * vpi) source_offset = 0 for lines in adam7_generate(self.width, self.height): # The previous (reconstructed) scanline. # `None` at the beginning of a pass # to indicate that there is no previous line. recon = None for x, y, xstep in lines: # Pixels per row (reduced pass image) ppr = int(math.ceil((self.width - x) / float(xstep))) # Row size in bytes for this pass. row_size = int(math.ceil(self.psize * ppr)) filter_type = raw[source_offset] source_offset += 1 scanline = raw[source_offset: source_offset + row_size] source_offset += row_size recon = self.undo_filter(filter_type, scanline, recon) # Convert so that there is one element per pixel value flat = self._bytes_to_values(recon, width=ppr) if xstep == 1: assert x == 0 offset = y * vpr a[offset: offset + vpr] = flat else: offset = y * vpr + x * self.planes end_offset = (y + 1) * vpr skip = self.planes * xstep for i in range(self.planes): a[offset + i: end_offset: skip] = \ flat[i:: self.planes] return a
python
def _deinterlace(self, raw): """ Read raw pixel data, undo filters, deinterlace, and flatten. Return a single array of values. """ # Values per row (of the target image) vpr = self.width * self.planes # Values per image vpi = vpr * self.height # Interleaving writes to the output array randomly # (well, not quite), so the entire output array must be in memory. # Make a result array, and make it big enough. if self.bitdepth > 8: a = array('H', [0] * vpi) else: a = bytearray([0] * vpi) source_offset = 0 for lines in adam7_generate(self.width, self.height): # The previous (reconstructed) scanline. # `None` at the beginning of a pass # to indicate that there is no previous line. recon = None for x, y, xstep in lines: # Pixels per row (reduced pass image) ppr = int(math.ceil((self.width - x) / float(xstep))) # Row size in bytes for this pass. row_size = int(math.ceil(self.psize * ppr)) filter_type = raw[source_offset] source_offset += 1 scanline = raw[source_offset: source_offset + row_size] source_offset += row_size recon = self.undo_filter(filter_type, scanline, recon) # Convert so that there is one element per pixel value flat = self._bytes_to_values(recon, width=ppr) if xstep == 1: assert x == 0 offset = y * vpr a[offset: offset + vpr] = flat else: offset = y * vpr + x * self.planes end_offset = (y + 1) * vpr skip = self.planes * xstep for i in range(self.planes): a[offset + i: end_offset: skip] = \ flat[i:: self.planes] return a
[ "def", "_deinterlace", "(", "self", ",", "raw", ")", ":", "# Values per row (of the target image)", "vpr", "=", "self", ".", "width", "*", "self", ".", "planes", "# Values per image", "vpi", "=", "vpr", "*", "self", ".", "height", "# Interleaving writes to the out...
Read raw pixel data, undo filters, deinterlace, and flatten. Return a single array of values.
[ "Read", "raw", "pixel", "data", "undo", "filters", "deinterlace", "and", "flatten", ".", "Return", "a", "single", "array", "of", "values", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L1468-L1518
train
212,311
drj11/pypng
code/png.py
Reader._bytes_to_values
def _bytes_to_values(self, bs, width=None): """Convert a packed row of bytes into a row of values. Result will be a freshly allocated object, not shared with the argument. """ if self.bitdepth == 8: return bytearray(bs) if self.bitdepth == 16: return array('H', struct.unpack('!%dH' % (len(bs) // 2), bs)) assert self.bitdepth < 8 if width is None: width = self.width # Samples per byte spb = 8 // self.bitdepth out = bytearray() mask = 2**self.bitdepth - 1 shifts = [self.bitdepth * i for i in reversed(list(range(spb)))] for o in bs: out.extend([mask & (o >> i) for i in shifts]) return out[:width]
python
def _bytes_to_values(self, bs, width=None): """Convert a packed row of bytes into a row of values. Result will be a freshly allocated object, not shared with the argument. """ if self.bitdepth == 8: return bytearray(bs) if self.bitdepth == 16: return array('H', struct.unpack('!%dH' % (len(bs) // 2), bs)) assert self.bitdepth < 8 if width is None: width = self.width # Samples per byte spb = 8 // self.bitdepth out = bytearray() mask = 2**self.bitdepth - 1 shifts = [self.bitdepth * i for i in reversed(list(range(spb)))] for o in bs: out.extend([mask & (o >> i) for i in shifts]) return out[:width]
[ "def", "_bytes_to_values", "(", "self", ",", "bs", ",", "width", "=", "None", ")", ":", "if", "self", ".", "bitdepth", "==", "8", ":", "return", "bytearray", "(", "bs", ")", "if", "self", ".", "bitdepth", "==", "16", ":", "return", "array", "(", "'...
Convert a packed row of bytes into a row of values. Result will be a freshly allocated object, not shared with the argument.
[ "Convert", "a", "packed", "row", "of", "bytes", "into", "a", "row", "of", "values", ".", "Result", "will", "be", "a", "freshly", "allocated", "object", "not", "shared", "with", "the", "argument", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L1531-L1554
train
212,312
drj11/pypng
code/png.py
Reader._iter_straight_packed
def _iter_straight_packed(self, byte_blocks): """Iterator that undoes the effect of filtering; yields each row as a sequence of packed bytes. Assumes input is straightlaced. `byte_blocks` should be an iterable that yields the raw bytes in blocks of arbitrary size. """ # length of row, in bytes rb = self.row_bytes a = bytearray() # The previous (reconstructed) scanline. # None indicates first line of image. recon = None for some_bytes in byte_blocks: a.extend(some_bytes) while len(a) >= rb + 1: filter_type = a[0] scanline = a[1: rb + 1] del a[: rb + 1] recon = self.undo_filter(filter_type, scanline, recon) yield recon if len(a) != 0: # :file:format We get here with a file format error: # when the available bytes (after decompressing) do not # pack into exact rows. raise FormatError('Wrong size for decompressed IDAT chunk.') assert len(a) == 0
python
def _iter_straight_packed(self, byte_blocks): """Iterator that undoes the effect of filtering; yields each row as a sequence of packed bytes. Assumes input is straightlaced. `byte_blocks` should be an iterable that yields the raw bytes in blocks of arbitrary size. """ # length of row, in bytes rb = self.row_bytes a = bytearray() # The previous (reconstructed) scanline. # None indicates first line of image. recon = None for some_bytes in byte_blocks: a.extend(some_bytes) while len(a) >= rb + 1: filter_type = a[0] scanline = a[1: rb + 1] del a[: rb + 1] recon = self.undo_filter(filter_type, scanline, recon) yield recon if len(a) != 0: # :file:format We get here with a file format error: # when the available bytes (after decompressing) do not # pack into exact rows. raise FormatError('Wrong size for decompressed IDAT chunk.') assert len(a) == 0
[ "def", "_iter_straight_packed", "(", "self", ",", "byte_blocks", ")", ":", "# length of row, in bytes", "rb", "=", "self", ".", "row_bytes", "a", "=", "bytearray", "(", ")", "# The previous (reconstructed) scanline.", "# None indicates first line of image.", "recon", "=",...
Iterator that undoes the effect of filtering; yields each row as a sequence of packed bytes. Assumes input is straightlaced. `byte_blocks` should be an iterable that yields the raw bytes in blocks of arbitrary size.
[ "Iterator", "that", "undoes", "the", "effect", "of", "filtering", ";", "yields", "each", "row", "as", "a", "sequence", "of", "packed", "bytes", ".", "Assumes", "input", "is", "straightlaced", ".", "byte_blocks", "should", "be", "an", "iterable", "that", "yie...
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L1556-L1583
train
212,313
drj11/pypng
code/png.py
Reader.preamble
def preamble(self, lenient=False): """ Extract the image metadata by reading the initial part of the PNG file up to the start of the ``IDAT`` chunk. All the chunks that precede the ``IDAT`` chunk are read and either processed for metadata or discarded. If the optional `lenient` argument evaluates to `True`, checksum failures will raise warnings rather than exceptions. """ self.validate_signature() while True: if not self.atchunk: self.atchunk = self._chunk_len_type() if self.atchunk is None: raise FormatError('This PNG file has no IDAT chunks.') if self.atchunk[1] == b'IDAT': return self.process_chunk(lenient=lenient)
python
def preamble(self, lenient=False): """ Extract the image metadata by reading the initial part of the PNG file up to the start of the ``IDAT`` chunk. All the chunks that precede the ``IDAT`` chunk are read and either processed for metadata or discarded. If the optional `lenient` argument evaluates to `True`, checksum failures will raise warnings rather than exceptions. """ self.validate_signature() while True: if not self.atchunk: self.atchunk = self._chunk_len_type() if self.atchunk is None: raise FormatError('This PNG file has no IDAT chunks.') if self.atchunk[1] == b'IDAT': return self.process_chunk(lenient=lenient)
[ "def", "preamble", "(", "self", ",", "lenient", "=", "False", ")", ":", "self", ".", "validate_signature", "(", ")", "while", "True", ":", "if", "not", "self", ".", "atchunk", ":", "self", ".", "atchunk", "=", "self", ".", "_chunk_len_type", "(", ")", ...
Extract the image metadata by reading the initial part of the PNG file up to the start of the ``IDAT`` chunk. All the chunks that precede the ``IDAT`` chunk are read and either processed for metadata or discarded. If the optional `lenient` argument evaluates to `True`, checksum failures will raise warnings rather than exceptions.
[ "Extract", "the", "image", "metadata", "by", "reading", "the", "initial", "part", "of", "the", "PNG", "file", "up", "to", "the", "start", "of", "the", "IDAT", "chunk", ".", "All", "the", "chunks", "that", "precede", "the", "IDAT", "chunk", "are", "read",...
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/png.py#L1597-L1618
train
212,314
drj11/pypng
code/pngsuite.py
_dehex
def _dehex(s): """Liberally convert from hex string to binary string.""" import re import binascii # Remove all non-hexadecimal digits s = re.sub(br'[^a-fA-F\d]', b'', s) # binscii.unhexlify works in Python 2 and Python 3 (unlike # thing.decode('hex')). return binascii.unhexlify(s)
python
def _dehex(s): """Liberally convert from hex string to binary string.""" import re import binascii # Remove all non-hexadecimal digits s = re.sub(br'[^a-fA-F\d]', b'', s) # binscii.unhexlify works in Python 2 and Python 3 (unlike # thing.decode('hex')). return binascii.unhexlify(s)
[ "def", "_dehex", "(", "s", ")", ":", "import", "re", "import", "binascii", "# Remove all non-hexadecimal digits", "s", "=", "re", ".", "sub", "(", "br'[^a-fA-F\\d]'", ",", "b''", ",", "s", ")", "# binscii.unhexlify works in Python 2 and Python 3 (unlike", "# thing.dec...
Liberally convert from hex string to binary string.
[ "Liberally", "convert", "from", "hex", "string", "to", "binary", "string", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/pngsuite.py#L20-L29
train
212,315
drj11/pypng
code/iccp.py
s15f16l
def s15f16l(s): """Convert sequence of ICC s15Fixed16 to list of float.""" # Note: As long as float has at least 32 bits of mantissa, all # values are preserved. n = len(s) // 4 t = struct.unpack('>%dl' % n, s) return map((2**-16).__mul__, t)
python
def s15f16l(s): """Convert sequence of ICC s15Fixed16 to list of float.""" # Note: As long as float has at least 32 bits of mantissa, all # values are preserved. n = len(s) // 4 t = struct.unpack('>%dl' % n, s) return map((2**-16).__mul__, t)
[ "def", "s15f16l", "(", "s", ")", ":", "# Note: As long as float has at least 32 bits of mantissa, all", "# values are preserved.", "n", "=", "len", "(", "s", ")", "//", "4", "t", "=", "struct", ".", "unpack", "(", "'>%dl'", "%", "n", ",", "s", ")", "return", ...
Convert sequence of ICC s15Fixed16 to list of float.
[ "Convert", "sequence", "of", "ICC", "s15Fixed16", "to", "list", "of", "float", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/iccp.py#L427-L433
train
212,316
drj11/pypng
code/iccp.py
RDcurv
def RDcurv(s): """Convert ICC curveType.""" # See [ICC 2001] 6.5.3 assert s[0:4] == 'curv' count, = struct.unpack('>L', s[8:12]) if count == 0: return dict(gamma=1) table = struct.unpack('>%dH' % count, s[12:]) if count == 1: return dict(gamma=table[0] * 2 ** -8) return table
python
def RDcurv(s): """Convert ICC curveType.""" # See [ICC 2001] 6.5.3 assert s[0:4] == 'curv' count, = struct.unpack('>L', s[8:12]) if count == 0: return dict(gamma=1) table = struct.unpack('>%dH' % count, s[12:]) if count == 1: return dict(gamma=table[0] * 2 ** -8) return table
[ "def", "RDcurv", "(", "s", ")", ":", "# See [ICC 2001] 6.5.3", "assert", "s", "[", "0", ":", "4", "]", "==", "'curv'", "count", ",", "=", "struct", ".", "unpack", "(", "'>L'", ",", "s", "[", "8", ":", "12", "]", ")", "if", "count", "==", "0", "...
Convert ICC curveType.
[ "Convert", "ICC", "curveType", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/iccp.py#L504-L514
train
212,317
drj11/pypng
code/iccp.py
RDvcgt
def RDvcgt(s): """Convert Apple CMVideoCardGammaType.""" # See # http://developer.apple.com/documentation/GraphicsImaging/Reference/ColorSync_Manager/Reference/reference.html#//apple_ref/c/tdef/CMVideoCardGammaType assert s[0:4] == 'vcgt' tagtype, = struct.unpack('>L', s[8:12]) if tagtype != 0: return s[8:] if tagtype == 0: # Table. channels, count, size = struct.unpack('>3H', s[12:18]) if size == 1: fmt = 'B' elif size == 2: fmt = 'H' else: return s[8:] n = len(s[18:]) // size t = struct.unpack('>%d%s' % (n, fmt), s[18:]) t = group(t, count) return size, t return s[8:]
python
def RDvcgt(s): """Convert Apple CMVideoCardGammaType.""" # See # http://developer.apple.com/documentation/GraphicsImaging/Reference/ColorSync_Manager/Reference/reference.html#//apple_ref/c/tdef/CMVideoCardGammaType assert s[0:4] == 'vcgt' tagtype, = struct.unpack('>L', s[8:12]) if tagtype != 0: return s[8:] if tagtype == 0: # Table. channels, count, size = struct.unpack('>3H', s[12:18]) if size == 1: fmt = 'B' elif size == 2: fmt = 'H' else: return s[8:] n = len(s[18:]) // size t = struct.unpack('>%d%s' % (n, fmt), s[18:]) t = group(t, count) return size, t return s[8:]
[ "def", "RDvcgt", "(", "s", ")", ":", "# See", "# http://developer.apple.com/documentation/GraphicsImaging/Reference/ColorSync_Manager/Reference/reference.html#//apple_ref/c/tdef/CMVideoCardGammaType", "assert", "s", "[", "0", ":", "4", "]", "==", "'vcgt'", "tagtype", ",", "=", ...
Convert Apple CMVideoCardGammaType.
[ "Convert", "Apple", "CMVideoCardGammaType", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/iccp.py#L517-L538
train
212,318
drj11/pypng
code/iccp.py
Profile.greyInput
def greyInput(self): """Adjust ``self.d`` dictionary for greyscale input device. ``profileclass`` is 'scnr', ``colourspace`` is 'GRAY', ``pcs`` is 'XYZ '. """ self.d.update(dict(profileclass='scnr', colourspace='GRAY', pcs='XYZ ')) return self
python
def greyInput(self): """Adjust ``self.d`` dictionary for greyscale input device. ``profileclass`` is 'scnr', ``colourspace`` is 'GRAY', ``pcs`` is 'XYZ '. """ self.d.update(dict(profileclass='scnr', colourspace='GRAY', pcs='XYZ ')) return self
[ "def", "greyInput", "(", "self", ")", ":", "self", ".", "d", ".", "update", "(", "dict", "(", "profileclass", "=", "'scnr'", ",", "colourspace", "=", "'GRAY'", ",", "pcs", "=", "'XYZ '", ")", ")", "return", "self" ]
Adjust ``self.d`` dictionary for greyscale input device. ``profileclass`` is 'scnr', ``colourspace`` is 'GRAY', ``pcs`` is 'XYZ '.
[ "Adjust", "self", ".", "d", "dictionary", "for", "greyscale", "input", "device", ".", "profileclass", "is", "scnr", "colourspace", "is", "GRAY", "pcs", "is", "XYZ", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/iccp.py#L114-L122
train
212,319
drj11/pypng
code/iccp.py
Profile.write
def write(self, out): """Write ICC Profile to the file.""" if not self.rawtagtable: self.rawtagtable = self.rawtagdict.items() tags = tagblock(self.rawtagtable) self.writeHeader(out, 128 + len(tags)) out.write(tags) out.flush() return self
python
def write(self, out): """Write ICC Profile to the file.""" if not self.rawtagtable: self.rawtagtable = self.rawtagdict.items() tags = tagblock(self.rawtagtable) self.writeHeader(out, 128 + len(tags)) out.write(tags) out.flush() return self
[ "def", "write", "(", "self", ",", "out", ")", ":", "if", "not", "self", ".", "rawtagtable", ":", "self", ".", "rawtagtable", "=", "self", ".", "rawtagdict", ".", "items", "(", ")", "tags", "=", "tagblock", "(", "self", ".", "rawtagtable", ")", "self"...
Write ICC Profile to the file.
[ "Write", "ICC", "Profile", "to", "the", "file", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/iccp.py#L147-L157
train
212,320
drj11/pypng
code/iccp.py
Profile.writeHeader
def writeHeader(self, out, size=999): """Add default values to the instance's `d` dictionary, then write a header out onto the file stream. The size of the profile must be specified using the `size` argument. """ def defaultkey(d, key, value): """Add ``[key]==value`` to the dictionary `d`, but only if it does not have that key already. """ if key in d: return d[key] = value z = '\x00' * 4 defaults = dict(preferredCMM=z, version='02000000', profileclass=z, colourspace=z, pcs='XYZ ', created=writeICCdatetime(), acsp='acsp', platform=z, flag=0, manufacturer=z, model=0, deviceattributes=0, intent=0, pcsilluminant=encodefuns()['XYZ'](*D50()), creator=z, ) for k, v in defaults.items(): defaultkey(self.d, k, v) hl = map(self.d.__getitem__, ['preferredCMM', 'version', 'profileclass', 'colourspace', 'pcs', 'created', 'acsp', 'platform', 'flag', 'manufacturer', 'model', 'deviceattributes', 'intent', 'pcsilluminant', 'creator']) # Convert to struct.pack input hl[1] = int(hl[1], 16) out.write(struct.pack('>L4sL4s4s4s12s4s4sL4sLQL12s4s', size, *hl)) out.write('\x00' * 44) return self
python
def writeHeader(self, out, size=999): """Add default values to the instance's `d` dictionary, then write a header out onto the file stream. The size of the profile must be specified using the `size` argument. """ def defaultkey(d, key, value): """Add ``[key]==value`` to the dictionary `d`, but only if it does not have that key already. """ if key in d: return d[key] = value z = '\x00' * 4 defaults = dict(preferredCMM=z, version='02000000', profileclass=z, colourspace=z, pcs='XYZ ', created=writeICCdatetime(), acsp='acsp', platform=z, flag=0, manufacturer=z, model=0, deviceattributes=0, intent=0, pcsilluminant=encodefuns()['XYZ'](*D50()), creator=z, ) for k, v in defaults.items(): defaultkey(self.d, k, v) hl = map(self.d.__getitem__, ['preferredCMM', 'version', 'profileclass', 'colourspace', 'pcs', 'created', 'acsp', 'platform', 'flag', 'manufacturer', 'model', 'deviceattributes', 'intent', 'pcsilluminant', 'creator']) # Convert to struct.pack input hl[1] = int(hl[1], 16) out.write(struct.pack('>L4sL4s4s4s12s4s4sL4sLQL12s4s', size, *hl)) out.write('\x00' * 44) return self
[ "def", "writeHeader", "(", "self", ",", "out", ",", "size", "=", "999", ")", ":", "def", "defaultkey", "(", "d", ",", "key", ",", "value", ")", ":", "\"\"\"Add ``[key]==value`` to the dictionary `d`, but only if\n it does not have that key already.\n ...
Add default values to the instance's `d` dictionary, then write a header out onto the file stream. The size of the profile must be specified using the `size` argument.
[ "Add", "default", "values", "to", "the", "instance", "s", "d", "dictionary", "then", "write", "a", "header", "out", "onto", "the", "file", "stream", ".", "The", "size", "of", "the", "profile", "must", "be", "specified", "using", "the", "size", "argument", ...
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/iccp.py#L159-L204
train
212,321
drj11/pypng
code/plan9topng.py
convert
def convert(f, output=sys.stdout): """Convert Plan 9 file to PNG format. Works with either uncompressed or compressed files. """ r = f.read(11) if r == 'compressed\n': png(output, *decompress(f)) else: png(output, *glue(f, r))
python
def convert(f, output=sys.stdout): """Convert Plan 9 file to PNG format. Works with either uncompressed or compressed files. """ r = f.read(11) if r == 'compressed\n': png(output, *decompress(f)) else: png(output, *glue(f, r))
[ "def", "convert", "(", "f", ",", "output", "=", "sys", ".", "stdout", ")", ":", "r", "=", "f", ".", "read", "(", "11", ")", "if", "r", "==", "'compressed\\n'", ":", "png", "(", "output", ",", "*", "decompress", "(", "f", ")", ")", "else", ":", ...
Convert Plan 9 file to PNG format. Works with either uncompressed or compressed files.
[ "Convert", "Plan", "9", "file", "to", "PNG", "format", ".", "Works", "with", "either", "uncompressed", "or", "compressed", "files", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/plan9topng.py#L30-L39
train
212,322
drj11/pypng
code/plan9topng.py
bitdepthof
def bitdepthof(pixel): """Return the bitdepth for a Plan9 pixel format string.""" maxd = 0 for c in re.findall(r'[a-z]\d*', pixel): if c[0] != 'x': maxd = max(maxd, int(c[1:])) return maxd
python
def bitdepthof(pixel): """Return the bitdepth for a Plan9 pixel format string.""" maxd = 0 for c in re.findall(r'[a-z]\d*', pixel): if c[0] != 'x': maxd = max(maxd, int(c[1:])) return maxd
[ "def", "bitdepthof", "(", "pixel", ")", ":", "maxd", "=", "0", "for", "c", "in", "re", ".", "findall", "(", "r'[a-z]\\d*'", ",", "pixel", ")", ":", "if", "c", "[", "0", "]", "!=", "'x'", ":", "maxd", "=", "max", "(", "maxd", ",", "int", "(", ...
Return the bitdepth for a Plan9 pixel format string.
[ "Return", "the", "bitdepth", "for", "a", "Plan9", "pixel", "format", "string", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/plan9topng.py#L68-L75
train
212,323
drj11/pypng
code/plan9topng.py
decompress
def decompress(f): """Decompress a Plan 9 image file. Assumes f is already cued past the initial 'compressed\n' string. """ r = meta(f.read(60)) return r, decomprest(f, r[4])
python
def decompress(f): """Decompress a Plan 9 image file. Assumes f is already cued past the initial 'compressed\n' string. """ r = meta(f.read(60)) return r, decomprest(f, r[4])
[ "def", "decompress", "(", "f", ")", ":", "r", "=", "meta", "(", "f", ".", "read", "(", "60", ")", ")", "return", "r", ",", "decomprest", "(", "f", ",", "r", "[", "4", "]", ")" ]
Decompress a Plan 9 image file. Assumes f is already cued past the initial 'compressed\n' string.
[ "Decompress", "a", "Plan", "9", "image", "file", ".", "Assumes", "f", "is", "already", "cued", "past", "the", "initial", "compressed", "\\", "n", "string", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/plan9topng.py#L215-L221
train
212,324
drj11/pypng
code/plan9topng.py
decomprest
def decomprest(f, rows): """Iterator that decompresses the rest of a file once the metadata have been consumed.""" row = 0 while row < rows: row, o = deblock(f) yield o
python
def decomprest(f, rows): """Iterator that decompresses the rest of a file once the metadata have been consumed.""" row = 0 while row < rows: row, o = deblock(f) yield o
[ "def", "decomprest", "(", "f", ",", "rows", ")", ":", "row", "=", "0", "while", "row", "<", "rows", ":", "row", ",", "o", "=", "deblock", "(", "f", ")", "yield", "o" ]
Iterator that decompresses the rest of a file once the metadata have been consumed.
[ "Iterator", "that", "decompresses", "the", "rest", "of", "a", "file", "once", "the", "metadata", "have", "been", "consumed", "." ]
b8220ca9f58e4c5bc1d507e713744fcb8c049225
https://github.com/drj11/pypng/blob/b8220ca9f58e4c5bc1d507e713744fcb8c049225/code/plan9topng.py#L224-L231
train
212,325
YoSmudge/dnsyo
dnsyo/dnsyo.py
lookup.prepareList
def prepareList(self, listFile=False, noSample=False): """ Load and filter the server list for only the servers we care about """ logging.debug("Loading resolver file") listFileLocation = self.listLocal if not listFile else listFile # Resolve the user part of the path listLocal = os.path.expanduser(listFileLocation) # Check local file location exists and is writable assert os.path.isdir(os.path.dirname(listLocal)),\ "{0} is not a directory!".format(os.path.dirname(listLocal)) assert os.access(os.path.dirname(listLocal), os.W_OK),\ "{0} is not writable!".format(os.path.dirname(listLocal)) # Open and yaml parse the resolver list with open(listLocal) as ll: raw = ll.read() # Use safe_load, just to be safe. serverList = yaml.safe_load(raw) # Remove all but the specified countries from the server list if self.country is not None: logging.debug("Filtering serverList for country {0}" .format(self.country)) serverList = [d for d in serverList if d['country'] == self.country] if len(serverList) == 0: raise ValueError("There are no servers avaliable " "with the country code {0}" .format(self.country)) # Get selected number of servers if self.maxServers == 'ALL' or noSample: # Set servers to the number of servers we have self.maxServers = len(serverList) elif self.maxServers > len(serverList): # We were asked for more servers than exist in the list logging.warning( "You asked me to query {0} servers, but I only have " "{1} servers in my serverlist".format( self.maxServers, len(serverList) ) ) # Fallback to setting it to all self.maxServers = len(serverList) # Get a random selection of the specified number # of servers from the list self.serverList = random.sample(serverList, self.maxServers) return self.serverList
python
def prepareList(self, listFile=False, noSample=False): """ Load and filter the server list for only the servers we care about """ logging.debug("Loading resolver file") listFileLocation = self.listLocal if not listFile else listFile # Resolve the user part of the path listLocal = os.path.expanduser(listFileLocation) # Check local file location exists and is writable assert os.path.isdir(os.path.dirname(listLocal)),\ "{0} is not a directory!".format(os.path.dirname(listLocal)) assert os.access(os.path.dirname(listLocal), os.W_OK),\ "{0} is not writable!".format(os.path.dirname(listLocal)) # Open and yaml parse the resolver list with open(listLocal) as ll: raw = ll.read() # Use safe_load, just to be safe. serverList = yaml.safe_load(raw) # Remove all but the specified countries from the server list if self.country is not None: logging.debug("Filtering serverList for country {0}" .format(self.country)) serverList = [d for d in serverList if d['country'] == self.country] if len(serverList) == 0: raise ValueError("There are no servers avaliable " "with the country code {0}" .format(self.country)) # Get selected number of servers if self.maxServers == 'ALL' or noSample: # Set servers to the number of servers we have self.maxServers = len(serverList) elif self.maxServers > len(serverList): # We were asked for more servers than exist in the list logging.warning( "You asked me to query {0} servers, but I only have " "{1} servers in my serverlist".format( self.maxServers, len(serverList) ) ) # Fallback to setting it to all self.maxServers = len(serverList) # Get a random selection of the specified number # of servers from the list self.serverList = random.sample(serverList, self.maxServers) return self.serverList
[ "def", "prepareList", "(", "self", ",", "listFile", "=", "False", ",", "noSample", "=", "False", ")", ":", "logging", ".", "debug", "(", "\"Loading resolver file\"", ")", "listFileLocation", "=", "self", ".", "listLocal", "if", "not", "listFile", "else", "li...
Load and filter the server list for only the servers we care about
[ "Load", "and", "filter", "the", "server", "list", "for", "only", "the", "servers", "we", "care", "about" ]
4734e36d712fefeb9a8ff22dfba678e382dde6cf
https://github.com/YoSmudge/dnsyo/blob/4734e36d712fefeb9a8ff22dfba678e382dde6cf/dnsyo/dnsyo.py#L169-L227
train
212,326
YoSmudge/dnsyo
dnsyo/dnsyo.py
lookup.query
def query(self, domain, recordType, progress=True): """ Run the query Query spins out multiple thread workers to query each server @param domain: Domain to query @param recordType: Type of record to query for @param progress: Write progress to stdout @type domain: str @type recordType: str """ # Ignore domain validation, if someone wants to lookup an invalid # domain let them, just ensure it's a string assert type(domain) == str, "Domain must be a string" # Ensure record type is valid, and in our list of allowed records recordType = recordType.upper() assert recordType in self.lookupRecordTypes, \ "Record type is not in valid list of record types {0}". \ format(', '.join(self.lookupRecordTypes)) self.domain = domain self.recordType = recordType self.resultsColated = [] self.results = [] if len(self.serverList) == 0: logging.warning("Server list is empty. Attempting " "to populate with prepareList") self.prepareList() logging.debug("Starting query against {0} servers".format( len(self.serverList))) workers = [] startTime = datetime.utcnow() serverCounter = 0 # Run continuously while waiting for results while len(self.results) < len(self.serverList): # Count the workers still running runningWorkers = len([w for w in workers if w.result is None]) # Get the results of any finished workers for i, w in enumerate(workers): if w.result: # Add the results and get rid of the worker from the # worker list self.results.append(w.result) workers.pop(i) # Output progress if progress: # Output progress on one line that updates if terminal # supports it sys.stdout.write( "\r\x1b[KStatus: Queried {0} of {1} servers, duration: {2}" .format(len(self.results), len(self.serverList), (datetime.utcnow() - startTime)) ) # Make sure the stdout updates sys.stdout.flush() # Start more workers if needed if runningWorkers < self.maxWorkers: logging.debug("Starting {0} workers".format( self.maxWorkers - runningWorkers)) # Start however many workers we need # based on max workers - running 
workers for i in range(0, self.maxWorkers - runningWorkers): if serverCounter < len(self.serverList): # Create a new thread with all the details wt = QueryWorker() wt.server = self.serverList[serverCounter] wt.domain = domain wt.recType = recordType wt.daemon = True # Add it to the worker tracker workers.append(wt) # Start it wt.start() serverCounter += 1 # Pause a little bit time.sleep(0.1) # Now colate the results # Group by number of servers with the same response for r in self.results: # Result already in collation if r['results'] in [rs['results'] for rs in self.resultsColated]: cid = [ i for i, rs in enumerate(self.resultsColated) if r['results'] == rs['results'] ][0] self.resultsColated[cid]['servers'].append(r['server']) else: self.resultsColated.append( { 'servers': [ r['server'] ], 'results': r['results'], 'success': r['success'] } ) if progress: sys.stdout.write("\n\n") logging.debug("There are {0} unique results".format( len(self.resultsColated)))
python
def query(self, domain, recordType, progress=True): """ Run the query Query spins out multiple thread workers to query each server @param domain: Domain to query @param recordType: Type of record to query for @param progress: Write progress to stdout @type domain: str @type recordType: str """ # Ignore domain validation, if someone wants to lookup an invalid # domain let them, just ensure it's a string assert type(domain) == str, "Domain must be a string" # Ensure record type is valid, and in our list of allowed records recordType = recordType.upper() assert recordType in self.lookupRecordTypes, \ "Record type is not in valid list of record types {0}". \ format(', '.join(self.lookupRecordTypes)) self.domain = domain self.recordType = recordType self.resultsColated = [] self.results = [] if len(self.serverList) == 0: logging.warning("Server list is empty. Attempting " "to populate with prepareList") self.prepareList() logging.debug("Starting query against {0} servers".format( len(self.serverList))) workers = [] startTime = datetime.utcnow() serverCounter = 0 # Run continuously while waiting for results while len(self.results) < len(self.serverList): # Count the workers still running runningWorkers = len([w for w in workers if w.result is None]) # Get the results of any finished workers for i, w in enumerate(workers): if w.result: # Add the results and get rid of the worker from the # worker list self.results.append(w.result) workers.pop(i) # Output progress if progress: # Output progress on one line that updates if terminal # supports it sys.stdout.write( "\r\x1b[KStatus: Queried {0} of {1} servers, duration: {2}" .format(len(self.results), len(self.serverList), (datetime.utcnow() - startTime)) ) # Make sure the stdout updates sys.stdout.flush() # Start more workers if needed if runningWorkers < self.maxWorkers: logging.debug("Starting {0} workers".format( self.maxWorkers - runningWorkers)) # Start however many workers we need # based on max workers - running 
workers for i in range(0, self.maxWorkers - runningWorkers): if serverCounter < len(self.serverList): # Create a new thread with all the details wt = QueryWorker() wt.server = self.serverList[serverCounter] wt.domain = domain wt.recType = recordType wt.daemon = True # Add it to the worker tracker workers.append(wt) # Start it wt.start() serverCounter += 1 # Pause a little bit time.sleep(0.1) # Now colate the results # Group by number of servers with the same response for r in self.results: # Result already in collation if r['results'] in [rs['results'] for rs in self.resultsColated]: cid = [ i for i, rs in enumerate(self.resultsColated) if r['results'] == rs['results'] ][0] self.resultsColated[cid]['servers'].append(r['server']) else: self.resultsColated.append( { 'servers': [ r['server'] ], 'results': r['results'], 'success': r['success'] } ) if progress: sys.stdout.write("\n\n") logging.debug("There are {0} unique results".format( len(self.resultsColated)))
[ "def", "query", "(", "self", ",", "domain", ",", "recordType", ",", "progress", "=", "True", ")", ":", "# Ignore domain validation, if someone wants to lookup an invalid", "# domain let them, just ensure it's a string", "assert", "type", "(", "domain", ")", "==", "str", ...
Run the query Query spins out multiple thread workers to query each server @param domain: Domain to query @param recordType: Type of record to query for @param progress: Write progress to stdout @type domain: str @type recordType: str
[ "Run", "the", "query" ]
4734e36d712fefeb9a8ff22dfba678e382dde6cf
https://github.com/YoSmudge/dnsyo/blob/4734e36d712fefeb9a8ff22dfba678e382dde6cf/dnsyo/dnsyo.py#L229-L350
train
212,327
YoSmudge/dnsyo
dnsyo/dnsyo.py
lookup.outputSimple
def outputSimple(self): """ Simple output mode """ out = [] errors = [] successfulResponses = \ len([True for rsp in self.results if rsp['success']]) out.append("INFO QUERIED {0}".format( len(self.serverList))) out.append("INFO SUCCESS {0}".format( successfulResponses)) out.append("INFO ERROR {0}".format( len(self.serverList) - successfulResponses)) for rsp in self.resultsColated: if rsp['success']: out.append("RESULT {0} {1}".format( len(rsp['servers']), "|".join(rsp['results']) )) else: errors.append("ERROR {0} {1}".format( len(rsp['servers']), "|".join(rsp['results']) )) out += errors sys.stdout.write("\n".join(out)) sys.stdout.write("\n")
python
def outputSimple(self):
    """Simple output mode"""
    # Header lines come first; error groups are buffered and appended
    # after all successful RESULT lines.
    errors = []
    successfulResponses = sum(1 for rsp in self.results if rsp['success'])

    out = [
        "INFO QUERIED {0}".format(len(self.serverList)),
        "INFO SUCCESS {0}".format(successfulResponses),
        "INFO ERROR {0}".format(len(self.serverList) - successfulResponses),
    ]

    for rsp in self.resultsColated:
        joined = "|".join(rsp['results'])
        if rsp['success']:
            out.append("RESULT {0} {1}".format(len(rsp['servers']), joined))
        else:
            errors.append("ERROR {0} {1}".format(len(rsp['servers']), joined))

    out += errors

    sys.stdout.write("\n".join(out))
    sys.stdout.write("\n")
[ "def", "outputSimple", "(", "self", ")", ":", "out", "=", "[", "]", "errors", "=", "[", "]", "successfulResponses", "=", "len", "(", "[", "True", "for", "rsp", "in", "self", ".", "results", "if", "rsp", "[", "'success'", "]", "]", ")", "out", ".", ...
Simple output mode
[ "Simple", "output", "mode" ]
4734e36d712fefeb9a8ff22dfba678e382dde6cf
https://github.com/YoSmudge/dnsyo/blob/4734e36d712fefeb9a8ff22dfba678e382dde6cf/dnsyo/dnsyo.py#L409-L442
train
212,328
YoSmudge/dnsyo
dnsyo/dnsyo.py
QueryWorker.run
def run(self): """ Do a single DNS query against a server """ logging.debug("Querying server {0}".format(self.server['ip'])) try: # Create a DNS resolver query rsvr = dns.resolver.Resolver() rsvr.nameservers = [self.server['ip']] rsvr.lifetime = 5 rsvr.timeout = 5 qry = rsvr.query(self.domain, self.recType) # Get the results, sort for consistancy results = sorted([r.to_text() for r in qry]) success = True # Handle all the various exceptions except dns.resolver.NXDOMAIN: success = False results = ['NXDOMAIN'] except dns.resolver.NoNameservers: success = False results = ['No Nameservers'] except dns.resolver.NoAnswer: success = False results = ['No Answer'] except dns.resolver.Timeout: success = False results = ['Server Timeout'] # Save the results self.result = { 'server': self.server, 'results': results, 'success': success }
python
def run(self):
    """Do a single DNS query against a server"""
    logging.debug("Querying server {0}".format(self.server['ip']))

    try:
        # Build a resolver pointed only at this worker's server, with a
        # 5-second budget for the whole lookup.
        resolver = dns.resolver.Resolver()
        resolver.nameservers = [self.server['ip']]
        resolver.lifetime = 5
        resolver.timeout = 5

        answer = resolver.query(self.domain, self.recType)
        # Sort the textual records so identical answers compare equal
        # regardless of the order the server returned them in.
        results = sorted(record.to_text() for record in answer)
        success = True
    # Map each resolver failure to a human-readable pseudo-result
    except dns.resolver.NXDOMAIN:
        success, results = False, ['NXDOMAIN']
    except dns.resolver.NoNameservers:
        success, results = False, ['No Nameservers']
    except dns.resolver.NoAnswer:
        success, results = False, ['No Answer']
    except dns.resolver.Timeout:
        success, results = False, ['Server Timeout']

    # Publish the outcome for the coordinating thread to collect
    self.result = {
        'server': self.server,
        'results': results,
        'success': success
    }
[ "def", "run", "(", "self", ")", ":", "logging", ".", "debug", "(", "\"Querying server {0}\"", ".", "format", "(", "self", ".", "server", "[", "'ip'", "]", ")", ")", "try", ":", "# Create a DNS resolver query", "rsvr", "=", "dns", ".", "resolver", ".", "R...
Do a single DNS query against a server
[ "Do", "a", "single", "DNS", "query", "against", "a", "server" ]
4734e36d712fefeb9a8ff22dfba678e382dde6cf
https://github.com/YoSmudge/dnsyo/blob/4734e36d712fefeb9a8ff22dfba678e382dde6cf/dnsyo/dnsyo.py#L460-L498
train
212,329
kislyuk/keymaker
keymaker/__init__.py
aws_to_unix_id
def aws_to_unix_id(aws_key_id): """Converts a AWS Key ID into a UID""" uid_bytes = hashlib.sha256(aws_key_id.encode()).digest()[-2:] if USING_PYTHON2: return 2000 + int(from_bytes(uid_bytes) // 2) else: return 2000 + (int.from_bytes(uid_bytes, byteorder=sys.byteorder) // 2)
python
def aws_to_unix_id(aws_key_id):
    """Converts a AWS Key ID into a UID"""
    # Derive a stable small integer from the last two bytes of the
    # key ID's SHA-256 digest, offset into the 2000+ UID range.
    uid_bytes = hashlib.sha256(aws_key_id.encode()).digest()[-2:]
    if USING_PYTHON2:
        return 2000 + int(from_bytes(uid_bytes) // 2)
    return 2000 + (int.from_bytes(uid_bytes, byteorder=sys.byteorder) // 2)
[ "def", "aws_to_unix_id", "(", "aws_key_id", ")", ":", "uid_bytes", "=", "hashlib", ".", "sha256", "(", "aws_key_id", ".", "encode", "(", ")", ")", ".", "digest", "(", ")", "[", "-", "2", ":", "]", "if", "USING_PYTHON2", ":", "return", "2000", "+", "i...
Converts a AWS Key ID into a UID
[ "Converts", "a", "AWS", "Key", "ID", "into", "a", "UID" ]
d9dc58b43376bd504ed2c11cde23cc78b43ba611
https://github.com/kislyuk/keymaker/blob/d9dc58b43376bd504ed2c11cde23cc78b43ba611/keymaker/__init__.py#L166-L172
train
212,330
ilevkivskyi/typing_inspect
typing_inspect.py
_gorg
def _gorg(cls): """This function exists for compatibility with old typing versions.""" assert isinstance(cls, GenericMeta) if hasattr(cls, '_gorg'): return cls._gorg while cls.__origin__ is not None: cls = cls.__origin__ return cls
python
def _gorg(cls):
    """This function exists for compatibility with old typing versions."""
    assert isinstance(cls, GenericMeta)
    # Newer typing versions expose the origin directly.
    if hasattr(cls, '_gorg'):
        return cls._gorg
    # Older versions: walk the __origin__ chain to the unsubscripted class.
    while cls.__origin__ is not None:
        cls = cls.__origin__
    return cls
[ "def", "_gorg", "(", "cls", ")", ":", "assert", "isinstance", "(", "cls", ",", "GenericMeta", ")", "if", "hasattr", "(", "cls", ",", "'_gorg'", ")", ":", "return", "cls", ".", "_gorg", "while", "cls", ".", "__origin__", "is", "not", "None", ":", "cls...
This function exists for compatibility with old typing versions.
[ "This", "function", "exists", "for", "compatibility", "with", "old", "typing", "versions", "." ]
fd81278cc440b6003f8298bcb22d5bc0f82ee3cd
https://github.com/ilevkivskyi/typing_inspect/blob/fd81278cc440b6003f8298bcb22d5bc0f82ee3cd/typing_inspect.py#L30-L37
train
212,331
ilevkivskyi/typing_inspect
typing_inspect.py
_eval_args
def _eval_args(args): """Internal helper for get_args.""" res = [] for arg in args: if not isinstance(arg, tuple): res.append(arg) elif is_callable_type(arg[0]): callable_args = _eval_args(arg[1:]) if len(arg) == 2: res.append(Callable[[], callable_args[0]]) elif arg[1] is Ellipsis: res.append(Callable[..., callable_args[1]]) else: res.append(Callable[list(callable_args[:-1]), callable_args[-1]]) else: res.append(type(arg[0]).__getitem__(arg[0], _eval_args(arg[1:]))) return tuple(res)
python
def _eval_args(args): """Internal helper for get_args.""" res = [] for arg in args: if not isinstance(arg, tuple): res.append(arg) elif is_callable_type(arg[0]): callable_args = _eval_args(arg[1:]) if len(arg) == 2: res.append(Callable[[], callable_args[0]]) elif arg[1] is Ellipsis: res.append(Callable[..., callable_args[1]]) else: res.append(Callable[list(callable_args[:-1]), callable_args[-1]]) else: res.append(type(arg[0]).__getitem__(arg[0], _eval_args(arg[1:]))) return tuple(res)
[ "def", "_eval_args", "(", "args", ")", ":", "res", "=", "[", "]", "for", "arg", "in", "args", ":", "if", "not", "isinstance", "(", "arg", ",", "tuple", ")", ":", "res", ".", "append", "(", "arg", ")", "elif", "is_callable_type", "(", "arg", "[", ...
Internal helper for get_args.
[ "Internal", "helper", "for", "get_args", "." ]
fd81278cc440b6003f8298bcb22d5bc0f82ee3cd
https://github.com/ilevkivskyi/typing_inspect/blob/fd81278cc440b6003f8298bcb22d5bc0f82ee3cd/typing_inspect.py#L282-L298
train
212,332
igvteam/igv-jupyter
igv/browser.py
Browser.on
def on(self, eventName, cb): """ Subscribe to an igv.js event. :param Name of the event. Currently only "locuschange" is supported. :type str :param cb - callback function taking a single argument. For the locuschange event this argument will contain a dictionary of the form {chr, start, end} :type function """ self.eventHandlers[eventName] = cb return self._send({ "id": self.igv_id, "command": "on", "eventName": eventName })
python
def on(self, eventName, cb):
    """
    Subscribe to an igv.js event.

    :param eventName: Name of the event. Currently only "locuschange"
        is supported.
    :type str
    :param cb: Callback function taking a single argument. For the
        locuschange event this argument will contain a dictionary of
        the form {chr, start, end}.
    :type function
    """
    # Remember the handler locally, then tell the browser to forward
    # this event back to us.
    self.eventHandlers[eventName] = cb
    message = {
        "id": self.igv_id,
        "command": "on",
        "eventName": eventName
    }
    return self._send(message)
[ "def", "on", "(", "self", ",", "eventName", ",", "cb", ")", ":", "self", ".", "eventHandlers", "[", "eventName", "]", "=", "cb", "return", "self", ".", "_send", "(", "{", "\"id\"", ":", "self", ".", "igv_id", ",", "\"command\"", ":", "\"on\"", ",", ...
Subscribe to an igv.js event. :param Name of the event. Currently only "locuschange" is supported. :type str :param cb - callback function taking a single argument. For the locuschange event this argument will contain a dictionary of the form {chr, start, end} :type function
[ "Subscribe", "to", "an", "igv", ".", "js", "event", "." ]
f93752ce507eae893c203325764551647e28a3dc
https://github.com/igvteam/igv-jupyter/blob/f93752ce507eae893c203325764551647e28a3dc/igv/browser.py#L151-L166
train
212,333
bcb/jsonrpcserver
jsonrpcserver/dispatcher.py
log_request
def log_request(request: str, trim_log_values: bool = False, **kwargs: Any) -> None: """Log a request""" return log_(request, request_logger, logging.INFO, trim=trim_log_values, **kwargs)
python
def log_request(request: str, trim_log_values: bool = False, **kwargs: Any) -> None:
    """Log a request"""
    # Requests go to the dedicated request logger at INFO level;
    # trimming (when enabled) shortens long values before logging.
    return log_(
        request, request_logger, logging.INFO, trim=trim_log_values, **kwargs
    )
[ "def", "log_request", "(", "request", ":", "str", ",", "trim_log_values", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "return", "log_", "(", "request", ",", "request_logger", ",", "logging", ".", "INFO", ",",...
Log a request
[ "Log", "a", "request" ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/dispatcher.py#L74-L76
train
212,334
bcb/jsonrpcserver
jsonrpcserver/dispatcher.py
log_response
def log_response(response: str, trim_log_values: bool = False, **kwargs: Any) -> None: """Log a response""" return log_(response, response_logger, logging.INFO, trim=trim_log_values, **kwargs)
python
def log_response(response: str, trim_log_values: bool = False, **kwargs: Any) -> None:
    """Log a response"""
    # Responses go to the dedicated response logger at INFO level;
    # trimming (when enabled) shortens long values before logging.
    return log_(
        response, response_logger, logging.INFO, trim=trim_log_values, **kwargs
    )
[ "def", "log_response", "(", "response", ":", "str", ",", "trim_log_values", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "return", "log_", "(", "response", ",", "response_logger", ",", "logging", ".", "INFO", ...
Log a response
[ "Log", "a", "response" ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/dispatcher.py#L79-L81
train
212,335
bcb/jsonrpcserver
jsonrpcserver/dispatcher.py
validate
def validate(request: Union[Dict, List], schema: dict) -> Union[Dict, List]: """ Wraps jsonschema.validate, returning the same object passed in. Args: request: The deserialized-from-json request. schema: The jsonschema schema to validate against. Raises: jsonschema.ValidationError """ jsonschema_validate(request, schema) return request
python
def validate(request: Union[Dict, List], schema: dict) -> Union[Dict, List]:
    """
    Wraps jsonschema.validate, returning the same object passed in.

    Args:
        request: The deserialized-from-json request.
        schema: The jsonschema schema to validate against.

    Raises:
        jsonschema.ValidationError
    """
    # Validation raises on failure; on success hand the request back
    # unchanged so calls can be chained.
    jsonschema_validate(request, schema)
    return request
[ "def", "validate", "(", "request", ":", "Union", "[", "Dict", ",", "List", "]", ",", "schema", ":", "dict", ")", "->", "Union", "[", "Dict", ",", "List", "]", ":", "jsonschema_validate", "(", "request", ",", "schema", ")", "return", "request" ]
Wraps jsonschema.validate, returning the same object passed in. Args: request: The deserialized-from-json request. schema: The jsonschema schema to validate against. Raises: jsonschema.ValidationError
[ "Wraps", "jsonschema", ".", "validate", "returning", "the", "same", "object", "passed", "in", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/dispatcher.py#L84-L96
train
212,336
bcb/jsonrpcserver
jsonrpcserver/dispatcher.py
call
def call(method: Method, *args: Any, **kwargs: Any) -> Any: """ Validates arguments and then calls the method. Args: method: The method to call. *args, **kwargs: Arguments to the method. Returns: The "result" part of the JSON-RPC response (the return value from the method). Raises: TypeError: If arguments don't match function signature. """ return validate_args(method, *args, **kwargs)(*args, **kwargs)
python
def call(method: Method, *args: Any, **kwargs: Any) -> Any:
    """
    Validates arguments and then calls the method.

    Args:
        method: The method to call.
        *args, **kwargs: Arguments to the method.

    Returns:
        The "result" part of the JSON-RPC response (the return value from
        the method).

    Raises:
        TypeError: If arguments don't match function signature.
    """
    # validate_args raises TypeError on a signature mismatch and
    # otherwise hands the method back for the actual invocation.
    validated = validate_args(method, *args, **kwargs)
    return validated(*args, **kwargs)
[ "def", "call", "(", "method", ":", "Method", ",", "*", "args", ":", "Any", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Any", ":", "return", "validate_args", "(", "method", ",", "*", "args", ",", "*", "*", "kwargs", ")", "(", "*", "args", "...
Validates arguments and then calls the method. Args: method: The method to call. *args, **kwargs: Arguments to the method. Returns: The "result" part of the JSON-RPC response (the return value from the method). Raises: TypeError: If arguments don't match function signature.
[ "Validates", "arguments", "and", "then", "calls", "the", "method", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/dispatcher.py#L99-L113
train
212,337
bcb/jsonrpcserver
jsonrpcserver/dispatcher.py
safe_call
def safe_call(request: Request, methods: Methods, *, debug: bool) -> Response: """ Call a Request, catching exceptions to ensure we always return a Response. Args: request: The Request object. methods: The list of methods that can be called. debug: Include more information in error responses. Returns: A Response object. """ with handle_exceptions(request, debug) as handler: result = call(methods.items[request.method], *request.args, **request.kwargs) handler.response = SuccessResponse(result=result, id=request.id) return handler.response
python
def safe_call(request: Request, methods: Methods, *, debug: bool) -> Response:
    """
    Call a Request, catching exceptions to ensure we always return a Response.

    Args:
        request: The Request object.
        methods: The list of methods that can be called.
        debug: Include more information in error responses.

    Returns:
        A Response object.
    """
    # handle_exceptions converts any raised exception (including an
    # unknown method name) into an error Response on the handler.
    with handle_exceptions(request, debug) as handler:
        method = methods.items[request.method]
        result = call(method, *request.args, **request.kwargs)
        handler.response = SuccessResponse(result=result, id=request.id)
    return handler.response
[ "def", "safe_call", "(", "request", ":", "Request", ",", "methods", ":", "Methods", ",", "*", ",", "debug", ":", "bool", ")", "->", "Response", ":", "with", "handle_exceptions", "(", "request", ",", "debug", ")", "as", "handler", ":", "result", "=", "c...
Call a Request, catching exceptions to ensure we always return a Response. Args: request: The Request object. methods: The list of methods that can be called. debug: Include more information in error responses. Returns: A Response object.
[ "Call", "a", "Request", "catching", "exceptions", "to", "ensure", "we", "always", "return", "a", "Response", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/dispatcher.py#L138-L153
train
212,338
bcb/jsonrpcserver
jsonrpcserver/dispatcher.py
call_requests
def call_requests( requests: Union[Request, Iterable[Request]], methods: Methods, debug: bool ) -> Response: """ Takes a request or list of Requests and calls them. Args: requests: Request object, or a collection of them. methods: The list of methods that can be called. debug: Include more information in error responses. """ if isinstance(requests, collections.Iterable): return BatchResponse(safe_call(r, methods, debug=debug) for r in requests) return safe_call(requests, methods, debug=debug)
python
def call_requests(
    requests: Union[Request, Iterable[Request]], methods: Methods, debug: bool
) -> Response:
    """
    Takes a request or list of Requests and calls them.

    Args:
        requests: Request object, or a collection of them.
        methods: The list of methods that can be called.
        debug: Include more information in error responses.
    """
    # FIX: `collections.Iterable` was a deprecated alias removed in
    # Python 3.10; the ABC lives in `collections.abc`.
    if isinstance(requests, collections.abc.Iterable):
        # Batch request: respond to each sub-request individually.
        return BatchResponse(safe_call(r, methods, debug=debug) for r in requests)
    return safe_call(requests, methods, debug=debug)
[ "def", "call_requests", "(", "requests", ":", "Union", "[", "Request", ",", "Iterable", "[", "Request", "]", "]", ",", "methods", ":", "Methods", ",", "debug", ":", "bool", ")", "->", "Response", ":", "if", "isinstance", "(", "requests", ",", "collection...
Takes a request or list of Requests and calls them. Args: requests: Request object, or a collection of them. methods: The list of methods that can be called. debug: Include more information in error responses.
[ "Takes", "a", "request", "or", "list", "of", "Requests", "and", "calls", "them", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/dispatcher.py#L156-L169
train
212,339
bcb/jsonrpcserver
jsonrpcserver/dispatcher.py
dispatch_pure
def dispatch_pure( request: str, methods: Methods, *, context: Any, convert_camel_case: bool, debug: bool, ) -> Response: """ Pure version of dispatch - no logging, no optional parameters. Does two things: 1. Deserializes and validates the string. 2. Calls each request. Args: request: The incoming request string. methods: Collection of methods that can be called. context: If specified, will be the first positional argument in all requests. convert_camel_case: Will convert the method name/any named params to snake case. debug: Include more information in error responses. Returns: A Response. """ try: deserialized = validate(deserialize(request), schema) except JSONDecodeError as exc: return InvalidJSONResponse(data=str(exc), debug=debug) except ValidationError as exc: return InvalidJSONRPCResponse(data=None, debug=debug) return call_requests( create_requests( deserialized, context=context, convert_camel_case=convert_camel_case ), methods, debug=debug, )
python
def dispatch_pure(
    request: str,
    methods: Methods,
    *,
    context: Any,
    convert_camel_case: bool,
    debug: bool,
) -> Response:
    """
    Pure version of dispatch - no logging, no optional parameters.

    Does two things:
        1. Deserializes and validates the string.
        2. Calls each request.

    Args:
        request: The incoming request string.
        methods: Collection of methods that can be called.
        context: If specified, will be the first positional argument in all
            requests.
        convert_camel_case: Will convert the method name/any named params to
            snake case.
        debug: Include more information in error responses.

    Returns:
        A Response.
    """
    try:
        deserialized = validate(deserialize(request), schema)
    except JSONDecodeError as exc:
        return InvalidJSONResponse(data=str(exc), debug=debug)
    # FIX: the exception was bound as `exc` but never used; the schema
    # failure detail is deliberately not exposed to the caller.
    except ValidationError:
        return InvalidJSONRPCResponse(data=None, debug=debug)
    return call_requests(
        create_requests(
            deserialized, context=context, convert_camel_case=convert_camel_case
        ),
        methods,
        debug=debug,
    )
[ "def", "dispatch_pure", "(", "request", ":", "str", ",", "methods", ":", "Methods", ",", "*", ",", "context", ":", "Any", ",", "convert_camel_case", ":", "bool", ",", "debug", ":", "bool", ",", ")", "->", "Response", ":", "try", ":", "deserialized", "=...
Pure version of dispatch - no logging, no optional parameters. Does two things: 1. Deserializes and validates the string. 2. Calls each request. Args: request: The incoming request string. methods: Collection of methods that can be called. context: If specified, will be the first positional argument in all requests. convert_camel_case: Will convert the method name/any named params to snake case. debug: Include more information in error responses. Returns: A Response.
[ "Pure", "version", "of", "dispatch", "-", "no", "logging", "no", "optional", "parameters", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/dispatcher.py#L195-L231
train
212,340
bcb/jsonrpcserver
jsonrpcserver/server.py
serve
def serve(name: str = "", port: int = 5000) -> None: """ A basic way to serve the methods. Args: name: Server address. port: Server port. """ logging.info(" * Listening on port %s", port) httpd = HTTPServer((name, port), RequestHandler) httpd.serve_forever()
python
def serve(name: str = "", port: int = 5000) -> None:
    """
    A basic way to serve the methods.

    Args:
        name: Server address.
        port: Server port.
    """
    logging.info(" * Listening on port %s", port)
    # Blocks forever, handling each HTTP request with RequestHandler.
    server = HTTPServer((name, port), RequestHandler)
    server.serve_forever()
[ "def", "serve", "(", "name", ":", "str", "=", "\"\"", ",", "port", ":", "int", "=", "5000", ")", "->", "None", ":", "logging", ".", "info", "(", "\" * Listening on port %s\"", ",", "port", ")", "httpd", "=", "HTTPServer", "(", "(", "name", ",", "port...
A basic way to serve the methods. Args: name: Server address. port: Server port.
[ "A", "basic", "way", "to", "serve", "the", "methods", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/server.py#L21-L31
train
212,341
bcb/jsonrpcserver
jsonrpcserver/request.py
convert_camel_case_string
def convert_camel_case_string(name: str) -> str: """Convert camel case string to snake case""" string = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) return re.sub("([a-z0-9])([A-Z])", r"\1_\2", string).lower()
python
def convert_camel_case_string(name: str) -> str:
    """Convert camel case string to snake case"""
    # Pass 1: split an uppercase-then-lowercase run from whatever precedes it
    # (handles acronym boundaries like "HTTPResponse" -> "HTTP_Response").
    partially_split = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
    # Pass 2: split lowercase/digit followed by uppercase, then lowercase all.
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", partially_split).lower()
[ "def", "convert_camel_case_string", "(", "name", ":", "str", ")", "->", "str", ":", "string", "=", "re", ".", "sub", "(", "\"(.)([A-Z][a-z]+)\"", ",", "r\"\\1_\\2\"", ",", "name", ")", "return", "re", ".", "sub", "(", "\"([a-z0-9])([A-Z])\"", ",", "r\"\\1_\\...
Convert camel case string to snake case
[ "Convert", "camel", "case", "string", "to", "snake", "case" ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/request.py#L18-L21
train
212,342
bcb/jsonrpcserver
jsonrpcserver/request.py
convert_camel_case_keys
def convert_camel_case_keys(original_dict: Dict[str, Any]) -> Dict[str, Any]: """Converts all keys of a dict from camel case to snake case, recursively""" new_dict = dict() for key, val in original_dict.items(): if isinstance(val, dict): # Recurse new_dict[convert_camel_case_string(key)] = convert_camel_case_keys(val) else: new_dict[convert_camel_case_string(key)] = val return new_dict
python
def convert_camel_case_keys(original_dict: Dict[str, Any]) -> Dict[str, Any]:
    """Converts all keys of a dict from camel case to snake case, recursively"""
    converted = {}
    for key, val in original_dict.items():
        new_key = convert_camel_case_string(key)
        # Recurse into nested dicts; leave every other value untouched.
        converted[new_key] = (
            convert_camel_case_keys(val) if isinstance(val, dict) else val
        )
    return converted
[ "def", "convert_camel_case_keys", "(", "original_dict", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "new_dict", "=", "dict", "(", ")", "for", "key", ",", "val", "in", "original_dict", ".", "items", "...
Converts all keys of a dict from camel case to snake case, recursively
[ "Converts", "all", "keys", "of", "a", "dict", "from", "camel", "case", "to", "snake", "case", "recursively" ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/request.py#L24-L33
train
212,343
bcb/jsonrpcserver
jsonrpcserver/request.py
get_arguments
def get_arguments( params: Union[List, Dict, object] = NOPARAMS, context: Any = NOCONTEXT ) -> Tuple[List, Dict]: """ Get the positional and keyword arguments from a request. Takes the 'params' part of a JSON-RPC request and converts it to either positional or named arguments usable in a Python function call. Note that a JSON-RPC request can only have positional _or_ named arguments, but not both. See http://www.jsonrpc.org/specification#parameter_structures Args: params: The 'params' part of the JSON-RPC request (should be a list or dict). The 'params' value can be a JSON array (Python list), object (Python dict), or None. context: Optionally include some context data, which will be included as the first positional arguments passed to the method. Returns: A two-tuple containing the positional (in a list, or None) and named (in a dict, or None) arguments, extracted from the 'params' part of the request. """ positionals, nameds = [], {} # type: list, dict if params is not NOPARAMS: assert isinstance(params, (list, dict)) if isinstance(params, list): positionals, nameds = (params, {}) elif isinstance(params, dict): positionals, nameds = ([], params) # If context data was passed, include it as the first positional argument. if context is not NOCONTEXT: positionals = [context] + positionals return (positionals, nameds)
python
def get_arguments(
    params: Union[List, Dict, object] = NOPARAMS, context: Any = NOCONTEXT
) -> Tuple[List, Dict]:
    """
    Get the positional and keyword arguments from a request.

    Takes the 'params' part of a JSON-RPC request and converts it to either
    positional or named arguments usable in a Python function call. Note that
    a JSON-RPC request can only have positional _or_ named arguments, but not
    both. See http://www.jsonrpc.org/specification#parameter_structures

    Args:
        params: The 'params' part of the JSON-RPC request (should be a list or
            dict). The 'params' value can be a JSON array (Python list),
            object (Python dict), or None.
        context: Optionally include some context data, which will be included
            as the first positional arguments passed to the method.

    Returns:
        A two-tuple containing the positional (in a list, or None) and named
        (in a dict, or None) arguments, extracted from the 'params' part of
        the request.
    """
    positionals, nameds = [], {}  # type: list, dict
    if params is not NOPARAMS:
        assert isinstance(params, (list, dict))
        # A JSON array maps to positional args; a JSON object to named args.
        if isinstance(params, list):
            positionals = params
        elif isinstance(params, dict):
            nameds = params
    # If context data was passed, include it as the first positional argument.
    if context is not NOCONTEXT:
        positionals = [context] + positionals
    return (positionals, nameds)
[ "def", "get_arguments", "(", "params", ":", "Union", "[", "List", ",", "Dict", ",", "object", "]", "=", "NOPARAMS", ",", "context", ":", "Any", "=", "NOCONTEXT", ")", "->", "Tuple", "[", "List", ",", "Dict", "]", ":", "positionals", ",", "nameds", "=...
Get the positional and keyword arguments from a request. Takes the 'params' part of a JSON-RPC request and converts it to either positional or named arguments usable in a Python function call. Note that a JSON-RPC request can only have positional _or_ named arguments, but not both. See http://www.jsonrpc.org/specification#parameter_structures Args: params: The 'params' part of the JSON-RPC request (should be a list or dict). The 'params' value can be a JSON array (Python list), object (Python dict), or None. context: Optionally include some context data, which will be included as the first positional arguments passed to the method. Returns: A two-tuple containing the positional (in a list, or None) and named (in a dict, or None) arguments, extracted from the 'params' part of the request.
[ "Get", "the", "positional", "and", "keyword", "arguments", "from", "a", "request", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/request.py#L36-L70
train
212,344
bcb/jsonrpcserver
jsonrpcserver/methods.py
validate_args
def validate_args(func: Method, *args: Any, **kwargs: Any) -> Method: """ Check if the request's arguments match a function's signature. Raises TypeError exception if arguments cannot be passed to a function. Args: func: The function to check. args: Positional arguments. kwargs: Keyword arguments. Raises: TypeError: If the arguments cannot be passed to the function. """ signature(func).bind(*args, **kwargs) return func
python
def validate_args(func: Method, *args: Any, **kwargs: Any) -> Method:
    """
    Check if the request's arguments match a function's signature.

    Raises TypeError exception if arguments cannot be passed to a
    function.

    Args:
        func: The function to check.
        args: Positional arguments.
        kwargs: Keyword arguments.

    Raises:
        TypeError: If the arguments cannot be passed to the function.
    """
    # Signature.bind raises TypeError on any mismatch; on success the
    # function itself is returned so the call can be chained.
    bound_ok = signature(func)
    bound_ok.bind(*args, **kwargs)
    return func
[ "def", "validate_args", "(", "func", ":", "Method", ",", "*", "args", ":", "Any", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Method", ":", "signature", "(", "func", ")", ".", "bind", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return",...
Check if the request's arguments match a function's signature. Raises TypeError exception if arguments cannot be passed to a function. Args: func: The function to check. args: Positional arguments. kwargs: Keyword arguments. Raises: TypeError: If the arguments cannot be passed to the function.
[ "Check", "if", "the", "request", "s", "arguments", "match", "a", "function", "s", "signature", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/methods.py#L16-L31
train
212,345
bcb/jsonrpcserver
jsonrpcserver/methods.py
Methods.add
def add(self, *args: Any, **kwargs: Any) -> Optional[Callable]: """ Register a function to the list. Args: *args: Set/Sequence of positional arguments. **kwargs: Mapping of named arguments. Raises: AttributeError: Raised if the method being added has no name. (i.e. it has no `__name__` property, and no `name` argument was given.) Examples: methods = Methods() @methods.add def subtract(minuend, subtrahend): return minuend - subtrahend """ self.items = { **self.items, # Methods passed as positional args need a __name__ attribute, raises # AttributeError otherwise. **{m.__name__: validate(m) for m in args}, **{k: validate(v) for k, v in kwargs.items()}, } if len(args): return args[0] # for the decorator to work return None
python
def add(self, *args: Any, **kwargs: Any) -> Optional[Callable]: """ Register a function to the list. Args: *args: Set/Sequence of positional arguments. **kwargs: Mapping of named arguments. Raises: AttributeError: Raised if the method being added has no name. (i.e. it has no `__name__` property, and no `name` argument was given.) Examples: methods = Methods() @methods.add def subtract(minuend, subtrahend): return minuend - subtrahend """ self.items = { **self.items, # Methods passed as positional args need a __name__ attribute, raises # AttributeError otherwise. **{m.__name__: validate(m) for m in args}, **{k: validate(v) for k, v in kwargs.items()}, } if len(args): return args[0] # for the decorator to work return None
[ "def", "add", "(", "self", ",", "*", "args", ":", "Any", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Optional", "[", "Callable", "]", ":", "self", ".", "items", "=", "{", "*", "*", "self", ".", "items", ",", "# Methods passed as positional args ...
Register a function to the list. Args: *args: Set/Sequence of positional arguments. **kwargs: Mapping of named arguments. Raises: AttributeError: Raised if the method being added has no name. (i.e. it has no `__name__` property, and no `name` argument was given.) Examples: methods = Methods() @methods.add def subtract(minuend, subtrahend): return minuend - subtrahend
[ "Register", "a", "function", "to", "the", "list", "." ]
26bb70e868f81691816cabfc4b60a83428842b2f
https://github.com/bcb/jsonrpcserver/blob/26bb70e868f81691816cabfc4b60a83428842b2f/jsonrpcserver/methods.py#L46-L73
train
212,346
onicagroup/runway
runway/cli.py
fix_hyphen_commands
def fix_hyphen_commands(raw_cli_arguments): """Update options to match their module names with underscores.""" for i in ['gen-sample']: raw_cli_arguments[i.replace('-', '_')] = raw_cli_arguments[i] raw_cli_arguments.pop(i) return raw_cli_arguments
python
def fix_hyphen_commands(raw_cli_arguments): """Update options to match their module names with underscores.""" for i in ['gen-sample']: raw_cli_arguments[i.replace('-', '_')] = raw_cli_arguments[i] raw_cli_arguments.pop(i) return raw_cli_arguments
[ "def", "fix_hyphen_commands", "(", "raw_cli_arguments", ")", ":", "for", "i", "in", "[", "'gen-sample'", "]", ":", "raw_cli_arguments", "[", "i", ".", "replace", "(", "'-'", ",", "'_'", ")", "]", "=", "raw_cli_arguments", "[", "i", "]", "raw_cli_arguments", ...
Update options to match their module names with underscores.
[ "Update", "options", "to", "match", "their", "module", "names", "with", "underscores", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/cli.py#L43-L48
train
212,347
onicagroup/runway
runway/cli.py
main
def main(): """Provide main CLI entrypoint.""" if os.environ.get('DEBUG'): logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.INFO) # botocore info is spammy logging.getLogger('botocore').setLevel(logging.ERROR) cli_arguments = fix_hyphen_commands(docopt(__doc__, version=version)) # at least one of these must be enabled, i.e. the value is 'True'... but unfortunately # `docopts` doesn't give you the hierarchy... so given 'gen-sample cfn', there are # TWO enabled items in the list, 'gen-sample' and 'cfn' possible_commands = [command for command, enabled in cli_arguments.items() if enabled] command_class = find_command_class(possible_commands) if command_class: command_class(cli_arguments).execute() else: LOGGER.error("class not found for command '%s'", possible_commands)
python
def main(): """Provide main CLI entrypoint.""" if os.environ.get('DEBUG'): logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.INFO) # botocore info is spammy logging.getLogger('botocore').setLevel(logging.ERROR) cli_arguments = fix_hyphen_commands(docopt(__doc__, version=version)) # at least one of these must be enabled, i.e. the value is 'True'... but unfortunately # `docopts` doesn't give you the hierarchy... so given 'gen-sample cfn', there are # TWO enabled items in the list, 'gen-sample' and 'cfn' possible_commands = [command for command, enabled in cli_arguments.items() if enabled] command_class = find_command_class(possible_commands) if command_class: command_class(cli_arguments).execute() else: LOGGER.error("class not found for command '%s'", possible_commands)
[ "def", "main", "(", ")", ":", "if", "os", ".", "environ", ".", "get", "(", "'DEBUG'", ")", ":", "logging", ".", "basicConfig", "(", "level", "=", "logging", ".", "DEBUG", ")", "else", ":", "logging", ".", "basicConfig", "(", "level", "=", "logging", ...
Provide main CLI entrypoint.
[ "Provide", "main", "CLI", "entrypoint", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/cli.py#L51-L71
train
212,348
onicagroup/runway
runway/hooks/staticsite/util.py
calculate_hash_of_files
def calculate_hash_of_files(files, root): """Return a hash of all of the given files at the given root. Adapted from stacker.hooks.aws_lambda; used according to its license: https://github.com/cloudtools/stacker/blob/1.4.0/LICENSE Args: files (list[str]): file names to include in the hash calculation, relative to ``root``. root (str): base directory to analyze files in. Returns: str: A hash of the hashes of the given files. """ file_hash = hashlib.md5() for fname in sorted(files): fileobj = os.path.join(root, fname) file_hash.update((fname + "\0").encode()) with open(fileobj, "rb") as filedes: for chunk in iter(lambda: filedes.read(4096), ""): # noqa pylint: disable=cell-var-from-loop if not chunk: break file_hash.update(chunk) file_hash.update("\0".encode()) return file_hash.hexdigest()
python
def calculate_hash_of_files(files, root): """Return a hash of all of the given files at the given root. Adapted from stacker.hooks.aws_lambda; used according to its license: https://github.com/cloudtools/stacker/blob/1.4.0/LICENSE Args: files (list[str]): file names to include in the hash calculation, relative to ``root``. root (str): base directory to analyze files in. Returns: str: A hash of the hashes of the given files. """ file_hash = hashlib.md5() for fname in sorted(files): fileobj = os.path.join(root, fname) file_hash.update((fname + "\0").encode()) with open(fileobj, "rb") as filedes: for chunk in iter(lambda: filedes.read(4096), ""): # noqa pylint: disable=cell-var-from-loop if not chunk: break file_hash.update(chunk) file_hash.update("\0".encode()) return file_hash.hexdigest()
[ "def", "calculate_hash_of_files", "(", "files", ",", "root", ")", ":", "file_hash", "=", "hashlib", ".", "md5", "(", ")", "for", "fname", "in", "sorted", "(", "files", ")", ":", "fileobj", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fname...
Return a hash of all of the given files at the given root. Adapted from stacker.hooks.aws_lambda; used according to its license: https://github.com/cloudtools/stacker/blob/1.4.0/LICENSE Args: files (list[str]): file names to include in the hash calculation, relative to ``root``. root (str): base directory to analyze files in. Returns: str: A hash of the hashes of the given files.
[ "Return", "a", "hash", "of", "all", "of", "the", "given", "files", "at", "the", "given", "root", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/hooks/staticsite/util.py#L14-L39
train
212,349
onicagroup/runway
runway/hooks/staticsite/util.py
get_hash_of_files
def get_hash_of_files(root_path, directories=None): """Generate md5 hash of files.""" if not directories: directories = [{'path': './'}] files_to_hash = [] for i in directories: ignorer = get_ignorer(os.path.join(root_path, i['path']), i.get('exclusions')) with change_dir(root_path): for root, dirs, files in os.walk(i['path'], topdown=True): if (root != './') and ignorer.is_ignored(root, True): dirs[:] = [] files[:] = [] else: for filename in files: filepath = os.path.join(root, filename) if not ignorer.is_ignored(filepath): files_to_hash.append( filepath[2:] if filepath.startswith('./') else filepath # noqa ) return calculate_hash_of_files(files_to_hash, root_path)
python
def get_hash_of_files(root_path, directories=None): """Generate md5 hash of files.""" if not directories: directories = [{'path': './'}] files_to_hash = [] for i in directories: ignorer = get_ignorer(os.path.join(root_path, i['path']), i.get('exclusions')) with change_dir(root_path): for root, dirs, files in os.walk(i['path'], topdown=True): if (root != './') and ignorer.is_ignored(root, True): dirs[:] = [] files[:] = [] else: for filename in files: filepath = os.path.join(root, filename) if not ignorer.is_ignored(filepath): files_to_hash.append( filepath[2:] if filepath.startswith('./') else filepath # noqa ) return calculate_hash_of_files(files_to_hash, root_path)
[ "def", "get_hash_of_files", "(", "root_path", ",", "directories", "=", "None", ")", ":", "if", "not", "directories", ":", "directories", "=", "[", "{", "'path'", ":", "'./'", "}", "]", "files_to_hash", "=", "[", "]", "for", "i", "in", "directories", ":",...
Generate md5 hash of files.
[ "Generate", "md5", "hash", "of", "files", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/hooks/staticsite/util.py#L42-L65
train
212,350
onicagroup/runway
runway/hooks/staticsite/util.py
get_ignorer
def get_ignorer(path, additional_exclusions=None): """Create ignorer with directory gitignore file.""" ignorefile = zgitignore.ZgitIgnore() gitignore_file = os.path.join(path, '.gitignore') if os.path.isfile(gitignore_file): with open(gitignore_file, 'r') as fileobj: ignorefile.add_patterns(fileobj.read().splitlines()) if additional_exclusions is not None: ignorefile.add_patterns(additional_exclusions) return ignorefile
python
def get_ignorer(path, additional_exclusions=None): """Create ignorer with directory gitignore file.""" ignorefile = zgitignore.ZgitIgnore() gitignore_file = os.path.join(path, '.gitignore') if os.path.isfile(gitignore_file): with open(gitignore_file, 'r') as fileobj: ignorefile.add_patterns(fileobj.read().splitlines()) if additional_exclusions is not None: ignorefile.add_patterns(additional_exclusions) return ignorefile
[ "def", "get_ignorer", "(", "path", ",", "additional_exclusions", "=", "None", ")", ":", "ignorefile", "=", "zgitignore", ".", "ZgitIgnore", "(", ")", "gitignore_file", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'.gitignore'", ")", "if", "os", ...
Create ignorer with directory gitignore file.
[ "Create", "ignorer", "with", "directory", "gitignore", "file", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/hooks/staticsite/util.py#L68-L79
train
212,351
onicagroup/runway
runway/tfenv.py
download_tf_release
def download_tf_release(version, versions_dir, command_suffix, tf_platform=None, arch=None): """Download Terraform archive and return path to it.""" version_dir = os.path.join(versions_dir, version) if arch is None: arch = ( os.environ.get('TFENV_ARCH') if os.environ.get('TFENV_ARCH') else 'amd64') if tf_platform: tfver_os = tf_platform + '_' + arch else: if platform.system().startswith('Darwin'): tfver_os = "darwin_%s" % arch elif platform.system().startswith('MINGW64') or ( platform.system().startswith('MSYS_NT') or ( platform.system().startswith('CYGWIN_NT'))): tfver_os = "windows_%s" % arch else: tfver_os = "linux_%s" % arch download_dir = tempfile.mkdtemp() filename = "terraform_%s_%s.zip" % (version, tfver_os) shasums_name = "terraform_%s_SHA256SUMS" % version tf_url = "https://releases.hashicorp.com/terraform/" + version for i in [filename, shasums_name]: urlretrieve(tf_url + '/' + i, os.path.join(download_dir, i)) tf_hash = get_hash_for_filename(filename, os.path.join(download_dir, shasums_name)) if tf_hash != sha256sum(os.path.join(download_dir, filename)): LOGGER.error("Downloaded Terraform %s does not match sha256 %s", filename, tf_hash) sys.exit(1) tf_zipfile = zipfile.ZipFile(os.path.join(download_dir, filename)) os.mkdir(version_dir) tf_zipfile.extractall(version_dir) tf_zipfile.close() shutil.rmtree(download_dir) os.chmod( # ensure it is executable os.path.join(version_dir, 'terraform' + command_suffix), os.stat(os.path.join(version_dir, 'terraform' + command_suffix)).st_mode | 0o0111 )
python
def download_tf_release(version, versions_dir, command_suffix, tf_platform=None, arch=None): """Download Terraform archive and return path to it.""" version_dir = os.path.join(versions_dir, version) if arch is None: arch = ( os.environ.get('TFENV_ARCH') if os.environ.get('TFENV_ARCH') else 'amd64') if tf_platform: tfver_os = tf_platform + '_' + arch else: if platform.system().startswith('Darwin'): tfver_os = "darwin_%s" % arch elif platform.system().startswith('MINGW64') or ( platform.system().startswith('MSYS_NT') or ( platform.system().startswith('CYGWIN_NT'))): tfver_os = "windows_%s" % arch else: tfver_os = "linux_%s" % arch download_dir = tempfile.mkdtemp() filename = "terraform_%s_%s.zip" % (version, tfver_os) shasums_name = "terraform_%s_SHA256SUMS" % version tf_url = "https://releases.hashicorp.com/terraform/" + version for i in [filename, shasums_name]: urlretrieve(tf_url + '/' + i, os.path.join(download_dir, i)) tf_hash = get_hash_for_filename(filename, os.path.join(download_dir, shasums_name)) if tf_hash != sha256sum(os.path.join(download_dir, filename)): LOGGER.error("Downloaded Terraform %s does not match sha256 %s", filename, tf_hash) sys.exit(1) tf_zipfile = zipfile.ZipFile(os.path.join(download_dir, filename)) os.mkdir(version_dir) tf_zipfile.extractall(version_dir) tf_zipfile.close() shutil.rmtree(download_dir) os.chmod( # ensure it is executable os.path.join(version_dir, 'terraform' + command_suffix), os.stat(os.path.join(version_dir, 'terraform' + command_suffix)).st_mode | 0o0111 )
[ "def", "download_tf_release", "(", "version", ",", "versions_dir", ",", "command_suffix", ",", "tf_platform", "=", "None", ",", "arch", "=", "None", ")", ":", "version_dir", "=", "os", ".", "path", ".", "join", "(", "versions_dir", ",", "version", ")", "if...
Download Terraform archive and return path to it.
[ "Download", "Terraform", "archive", "and", "return", "path", "to", "it", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/tfenv.py#L25-L73
train
212,352
onicagroup/runway
runway/tfenv.py
get_available_tf_versions
def get_available_tf_versions(include_prerelease=False): """Return available Terraform versions.""" tf_releases = json.loads( requests.get('https://releases.hashicorp.com/index.json').text )['terraform'] tf_versions = sorted([k # descending for k, _v in tf_releases['versions'].items()], key=LooseVersion, reverse=True) if include_prerelease: return tf_versions return [i for i in tf_versions if '-' not in i]
python
def get_available_tf_versions(include_prerelease=False): """Return available Terraform versions.""" tf_releases = json.loads( requests.get('https://releases.hashicorp.com/index.json').text )['terraform'] tf_versions = sorted([k # descending for k, _v in tf_releases['versions'].items()], key=LooseVersion, reverse=True) if include_prerelease: return tf_versions return [i for i in tf_versions if '-' not in i]
[ "def", "get_available_tf_versions", "(", "include_prerelease", "=", "False", ")", ":", "tf_releases", "=", "json", ".", "loads", "(", "requests", ".", "get", "(", "'https://releases.hashicorp.com/index.json'", ")", ".", "text", ")", "[", "'terraform'", "]", "tf_ve...
Return available Terraform versions.
[ "Return", "available", "Terraform", "versions", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/tfenv.py#L76-L87
train
212,353
onicagroup/runway
runway/tfenv.py
find_min_required
def find_min_required(path): """Inspect terraform files and find minimum version.""" found_min_required = '' for filename in glob.glob(os.path.join(path, '*.tf')): with open(filename, 'r') as stream: tf_config = hcl.load(stream) if tf_config.get('terraform', {}).get('required_version'): found_min_required = tf_config.get('terraform', {}).get('required_version') break if found_min_required: if re.match(r'^!=.+', found_min_required): LOGGER.error('Min required Terraform version is a negation (%s) ' '- unable to determine required version', found_min_required) sys.exit(1) else: found_min_required = re.search(r'[0-9]*\.[0-9]*(?:\.[0-9]*)?', found_min_required).group(0) LOGGER.debug("Detected minimum terraform version is %s", found_min_required) return found_min_required LOGGER.error('Terraform version specified as min-required, but unable to ' 'find a specified version requirement in this module\'s tf ' 'files') sys.exit(1)
python
def find_min_required(path): """Inspect terraform files and find minimum version.""" found_min_required = '' for filename in glob.glob(os.path.join(path, '*.tf')): with open(filename, 'r') as stream: tf_config = hcl.load(stream) if tf_config.get('terraform', {}).get('required_version'): found_min_required = tf_config.get('terraform', {}).get('required_version') break if found_min_required: if re.match(r'^!=.+', found_min_required): LOGGER.error('Min required Terraform version is a negation (%s) ' '- unable to determine required version', found_min_required) sys.exit(1) else: found_min_required = re.search(r'[0-9]*\.[0-9]*(?:\.[0-9]*)?', found_min_required).group(0) LOGGER.debug("Detected minimum terraform version is %s", found_min_required) return found_min_required LOGGER.error('Terraform version specified as min-required, but unable to ' 'find a specified version requirement in this module\'s tf ' 'files') sys.exit(1)
[ "def", "find_min_required", "(", "path", ")", ":", "found_min_required", "=", "''", "for", "filename", "in", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'*.tf'", ")", ")", ":", "with", "open", "(", "filename", ",", "'...
Inspect terraform files and find minimum version.
[ "Inspect", "terraform", "files", "and", "find", "minimum", "version", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/tfenv.py#L98-L124
train
212,354
onicagroup/runway
runway/tfenv.py
get_version_requested
def get_version_requested(path): """Return string listing requested Terraform version.""" tf_version_path = os.path.join(path, TF_VERSION_FILENAME) if not os.path.isfile(tf_version_path): LOGGER.error("Terraform install attempted and no %s file present to " "dictate the version. Please create it (e.g. write " "\"0.11.13\" (without quotes) to the file and try again", TF_VERSION_FILENAME) sys.exit(1) with open(tf_version_path, 'r') as stream: ver = stream.read().rstrip() return ver
python
def get_version_requested(path): """Return string listing requested Terraform version.""" tf_version_path = os.path.join(path, TF_VERSION_FILENAME) if not os.path.isfile(tf_version_path): LOGGER.error("Terraform install attempted and no %s file present to " "dictate the version. Please create it (e.g. write " "\"0.11.13\" (without quotes) to the file and try again", TF_VERSION_FILENAME) sys.exit(1) with open(tf_version_path, 'r') as stream: ver = stream.read().rstrip() return ver
[ "def", "get_version_requested", "(", "path", ")", ":", "tf_version_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "TF_VERSION_FILENAME", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "tf_version_path", ")", ":", "LOGGER", ".", "...
Return string listing requested Terraform version.
[ "Return", "string", "listing", "requested", "Terraform", "version", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/tfenv.py#L127-L139
train
212,355
onicagroup/runway
runway/tfenv.py
ensure_versions_dir_exists
def ensure_versions_dir_exists(tfenv_path): """Ensure versions directory is available.""" versions_dir = os.path.join(tfenv_path, 'versions') if not os.path.isdir(tfenv_path): os.mkdir(tfenv_path) if not os.path.isdir(versions_dir): os.mkdir(versions_dir) return versions_dir
python
def ensure_versions_dir_exists(tfenv_path): """Ensure versions directory is available.""" versions_dir = os.path.join(tfenv_path, 'versions') if not os.path.isdir(tfenv_path): os.mkdir(tfenv_path) if not os.path.isdir(versions_dir): os.mkdir(versions_dir) return versions_dir
[ "def", "ensure_versions_dir_exists", "(", "tfenv_path", ")", ":", "versions_dir", "=", "os", ".", "path", ".", "join", "(", "tfenv_path", ",", "'versions'", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "tfenv_path", ")", ":", "os", ".", "mkdir...
Ensure versions directory is available.
[ "Ensure", "versions", "directory", "is", "available", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/tfenv.py#L142-L149
train
212,356
onicagroup/runway
runway/tfenv.py
TFEnv.install
def install(self, version_requested=None): """Ensure terraform is available.""" command_suffix = '.exe' if platform.system() == 'Windows' else '' versions_dir = ensure_versions_dir_exists(self.tfenv_dir) if not version_requested: version_requested = get_version_requested(self.path) if re.match(r'^min-required$', version_requested): LOGGER.debug('tfenv: detecting minimal required version') version_requested = find_min_required(self.path) if re.match(r'^latest:.*$', version_requested): regex = re.search(r'latest:(.*)', version_requested).group(1) include_prerelease_versions = False elif re.match(r'^latest$', version_requested): regex = r'^[0-9]+\.[0-9]+\.[0-9]+$' include_prerelease_versions = False else: regex = "^%s$" % version_requested include_prerelease_versions = True # Return early (i.e before reaching out to the internet) if the # matching version is already installed if os.path.isdir(os.path.join(versions_dir, version_requested)): LOGGER.info("Terraform version %s already installed; using " "it...", version_requested) return os.path.join(versions_dir, version_requested, 'terraform') + command_suffix try: version = next(i for i in get_available_tf_versions( include_prerelease_versions) if re.match(regex, i)) except StopIteration: LOGGER.error("Unable to find a Terraform version matching regex: %s", regex) sys.exit(1) # Now that a version has been selected, skip downloading if it's # already been downloaded if os.path.isdir(os.path.join(versions_dir, version)): LOGGER.info("Terraform version %s already installed; using it...", version) return os.path.join(versions_dir, version, 'terraform') + command_suffix LOGGER.info("Downloading and using Terraform version %s ...", version) download_tf_release(version, versions_dir, command_suffix) LOGGER.info("Downloaded Terraform %s successfully", version) return os.path.join(versions_dir, version, 'terraform') + command_suffix
python
def install(self, version_requested=None): """Ensure terraform is available.""" command_suffix = '.exe' if platform.system() == 'Windows' else '' versions_dir = ensure_versions_dir_exists(self.tfenv_dir) if not version_requested: version_requested = get_version_requested(self.path) if re.match(r'^min-required$', version_requested): LOGGER.debug('tfenv: detecting minimal required version') version_requested = find_min_required(self.path) if re.match(r'^latest:.*$', version_requested): regex = re.search(r'latest:(.*)', version_requested).group(1) include_prerelease_versions = False elif re.match(r'^latest$', version_requested): regex = r'^[0-9]+\.[0-9]+\.[0-9]+$' include_prerelease_versions = False else: regex = "^%s$" % version_requested include_prerelease_versions = True # Return early (i.e before reaching out to the internet) if the # matching version is already installed if os.path.isdir(os.path.join(versions_dir, version_requested)): LOGGER.info("Terraform version %s already installed; using " "it...", version_requested) return os.path.join(versions_dir, version_requested, 'terraform') + command_suffix try: version = next(i for i in get_available_tf_versions( include_prerelease_versions) if re.match(regex, i)) except StopIteration: LOGGER.error("Unable to find a Terraform version matching regex: %s", regex) sys.exit(1) # Now that a version has been selected, skip downloading if it's # already been downloaded if os.path.isdir(os.path.join(versions_dir, version)): LOGGER.info("Terraform version %s already installed; using it...", version) return os.path.join(versions_dir, version, 'terraform') + command_suffix LOGGER.info("Downloading and using Terraform version %s ...", version) download_tf_release(version, versions_dir, command_suffix) LOGGER.info("Downloaded Terraform %s successfully", version) return os.path.join(versions_dir, version, 'terraform') + command_suffix
[ "def", "install", "(", "self", ",", "version_requested", "=", "None", ")", ":", "command_suffix", "=", "'.exe'", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", "else", "''", "versions_dir", "=", "ensure_versions_dir_exists", "(", "self", ".", ...
Ensure terraform is available.
[ "Ensure", "terraform", "is", "available", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/tfenv.py#L183-L238
train
212,357
onicagroup/runway
runway/hooks/cleanup_ssm.py
delete_param
def delete_param(context, provider, **kwargs): # noqa pylint: disable=unused-argument """Delete SSM parameter.""" parameter_name = kwargs.get('parameter_name') if not parameter_name: raise ValueError('Must specify `parameter_name` for delete_param ' 'hook.') session = get_session(provider.region) ssm_client = session.client('ssm') try: ssm_client.delete_parameter(Name=parameter_name) except ssm_client.exceptions.ParameterNotFound: LOGGER.info("%s parameter appears to have already been deleted...", parameter_name) return True
python
def delete_param(context, provider, **kwargs): # noqa pylint: disable=unused-argument """Delete SSM parameter.""" parameter_name = kwargs.get('parameter_name') if not parameter_name: raise ValueError('Must specify `parameter_name` for delete_param ' 'hook.') session = get_session(provider.region) ssm_client = session.client('ssm') try: ssm_client.delete_parameter(Name=parameter_name) except ssm_client.exceptions.ParameterNotFound: LOGGER.info("%s parameter appears to have already been deleted...", parameter_name) return True
[ "def", "delete_param", "(", "context", ",", "provider", ",", "*", "*", "kwargs", ")", ":", "# noqa pylint: disable=unused-argument", "parameter_name", "=", "kwargs", ".", "get", "(", "'parameter_name'", ")", "if", "not", "parameter_name", ":", "raise", "ValueError...
Delete SSM parameter.
[ "Delete", "SSM", "parameter", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/hooks/cleanup_ssm.py#L10-L25
train
212,358
onicagroup/runway
runway/context.py
Context.save_existing_iam_env_vars
def save_existing_iam_env_vars(self): """Backup IAM environment variables for later restoration.""" for i in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN']: if i in self.env_vars: self.env_vars['OLD_' + i] = self.env_vars[i]
python
def save_existing_iam_env_vars(self): """Backup IAM environment variables for later restoration.""" for i in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN']: if i in self.env_vars: self.env_vars['OLD_' + i] = self.env_vars[i]
[ "def", "save_existing_iam_env_vars", "(", "self", ")", ":", "for", "i", "in", "[", "'AWS_ACCESS_KEY_ID'", ",", "'AWS_SECRET_ACCESS_KEY'", ",", "'AWS_SESSION_TOKEN'", "]", ":", "if", "i", "in", "self", ".", "env_vars", ":", "self", ".", "env_vars", "[", "'OLD_'...
Backup IAM environment variables for later restoration.
[ "Backup", "IAM", "environment", "variables", "for", "later", "restoration", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/context.py#L21-L26
train
212,359
onicagroup/runway
runway/context.py
Context.restore_existing_iam_env_vars
def restore_existing_iam_env_vars(self): """Restore backed up IAM environment variables.""" for i in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN']: if 'OLD_' + i in self.env_vars: self.env_vars[i] = self.env_vars['OLD_' + i] elif i in self.env_vars: self.env_vars.pop(i)
python
def restore_existing_iam_env_vars(self): """Restore backed up IAM environment variables.""" for i in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SESSION_TOKEN']: if 'OLD_' + i in self.env_vars: self.env_vars[i] = self.env_vars['OLD_' + i] elif i in self.env_vars: self.env_vars.pop(i)
[ "def", "restore_existing_iam_env_vars", "(", "self", ")", ":", "for", "i", "in", "[", "'AWS_ACCESS_KEY_ID'", ",", "'AWS_SECRET_ACCESS_KEY'", ",", "'AWS_SESSION_TOKEN'", "]", ":", "if", "'OLD_'", "+", "i", "in", "self", ".", "env_vars", ":", "self", ".", "env_v...
Restore backed up IAM environment variables.
[ "Restore", "backed", "up", "IAM", "environment", "variables", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/context.py#L28-L35
train
212,360
onicagroup/runway
runway/templates/serverless/handler.py
hello
def hello(event, context): # pylint: disable=unused-argument """Return Serverless Hello World.""" body = { "message": "Go Serverless v1.0! Your function executed successfully!", "input": event } response = { "statusCode": 200, "body": json.dumps(body) } return response
python
def hello(event, context): # pylint: disable=unused-argument """Return Serverless Hello World.""" body = { "message": "Go Serverless v1.0! Your function executed successfully!", "input": event } response = { "statusCode": 200, "body": json.dumps(body) } return response
[ "def", "hello", "(", "event", ",", "context", ")", ":", "# pylint: disable=unused-argument", "body", "=", "{", "\"message\"", ":", "\"Go Serverless v1.0! Your function executed successfully!\"", ",", "\"input\"", ":", "event", "}", "response", "=", "{", "\"statusCode\""...
Return Serverless Hello World.
[ "Return", "Serverless", "Hello", "World", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/templates/serverless/handler.py#L5-L17
train
212,361
onicagroup/runway
runway/commands/runway_command.py
get_env
def get_env(path, ignore_git_branch=False): """Determine environment name.""" if 'DEPLOY_ENVIRONMENT' in os.environ: return os.environ['DEPLOY_ENVIRONMENT'] if ignore_git_branch: LOGGER.info('Skipping environment lookup from current git branch ' '("ignore_git_branch" is set to true in the runway ' 'config)') else: # These are not located with the top imports because they throw an # error if git isn't installed from git import Repo as GitRepo from git.exc import InvalidGitRepositoryError try: b_name = GitRepo( path, search_parent_directories=True ).active_branch.name LOGGER.info('Deriving environment name from git branch %s...', b_name) return get_env_from_branch(b_name) except InvalidGitRepositoryError: pass LOGGER.info('Deriving environment name from directory %s...', path) return get_env_from_directory(os.path.basename(path))
python
def get_env(path, ignore_git_branch=False): """Determine environment name.""" if 'DEPLOY_ENVIRONMENT' in os.environ: return os.environ['DEPLOY_ENVIRONMENT'] if ignore_git_branch: LOGGER.info('Skipping environment lookup from current git branch ' '("ignore_git_branch" is set to true in the runway ' 'config)') else: # These are not located with the top imports because they throw an # error if git isn't installed from git import Repo as GitRepo from git.exc import InvalidGitRepositoryError try: b_name = GitRepo( path, search_parent_directories=True ).active_branch.name LOGGER.info('Deriving environment name from git branch %s...', b_name) return get_env_from_branch(b_name) except InvalidGitRepositoryError: pass LOGGER.info('Deriving environment name from directory %s...', path) return get_env_from_directory(os.path.basename(path))
[ "def", "get_env", "(", "path", ",", "ignore_git_branch", "=", "False", ")", ":", "if", "'DEPLOY_ENVIRONMENT'", "in", "os", ".", "environ", ":", "return", "os", ".", "environ", "[", "'DEPLOY_ENVIRONMENT'", "]", "if", "ignore_git_branch", ":", "LOGGER", ".", "...
Determine environment name.
[ "Determine", "environment", "name", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L137-L163
train
212,362
onicagroup/runway
runway/commands/runway_command.py
RunwayCommand.get_env_dirs
def get_env_dirs(self): """Return list of directories in env_root.""" repo_dirs = next(os.walk(self.env_root))[1] if '.git' in repo_dirs: repo_dirs.remove('.git') # not relevant for any repo operations return repo_dirs
python
def get_env_dirs(self): """Return list of directories in env_root.""" repo_dirs = next(os.walk(self.env_root))[1] if '.git' in repo_dirs: repo_dirs.remove('.git') # not relevant for any repo operations return repo_dirs
[ "def", "get_env_dirs", "(", "self", ")", ":", "repo_dirs", "=", "next", "(", "os", ".", "walk", "(", "self", ".", "env_root", ")", ")", "[", "1", "]", "if", "'.git'", "in", "repo_dirs", ":", "repo_dirs", ".", "remove", "(", "'.git'", ")", "# not rele...
Return list of directories in env_root.
[ "Return", "list", "of", "directories", "in", "env_root", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L40-L45
train
212,363
onicagroup/runway
runway/commands/runway_command.py
RunwayCommand.get_yaml_files_at_env_root
def get_yaml_files_at_env_root(self): """Return list of yaml files in env_root.""" yaml_files = glob.glob( os.path.join(self.env_root, '*.yaml') ) yml_files = glob.glob( os.path.join(self.env_root, '*.yml') ) return yaml_files + yml_files
python
def get_yaml_files_at_env_root(self): """Return list of yaml files in env_root.""" yaml_files = glob.glob( os.path.join(self.env_root, '*.yaml') ) yml_files = glob.glob( os.path.join(self.env_root, '*.yml') ) return yaml_files + yml_files
[ "def", "get_yaml_files_at_env_root", "(", "self", ")", ":", "yaml_files", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "self", ".", "env_root", ",", "'*.yaml'", ")", ")", "yml_files", "=", "glob", ".", "glob", "(", "os", ".", "...
Return list of yaml files in env_root.
[ "Return", "list", "of", "yaml", "files", "in", "env_root", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L51-L59
train
212,364
onicagroup/runway
runway/commands/runway_command.py
RunwayCommand.get_cookbook_dirs
def get_cookbook_dirs(self, base_dir=None): """Find cookbook directories.""" if base_dir is None: base_dir = self.env_root cookbook_dirs = [] dirs_to_skip = set(['.git']) for root, dirs, files in os.walk(base_dir): # pylint: disable=W0612 dirs[:] = [d for d in dirs if d not in dirs_to_skip] for name in files: if name == 'metadata.rb': if 'cookbook' in os.path.basename(os.path.dirname(root)): cookbook_dirs.append(root) return cookbook_dirs
python
def get_cookbook_dirs(self, base_dir=None): """Find cookbook directories.""" if base_dir is None: base_dir = self.env_root cookbook_dirs = [] dirs_to_skip = set(['.git']) for root, dirs, files in os.walk(base_dir): # pylint: disable=W0612 dirs[:] = [d for d in dirs if d not in dirs_to_skip] for name in files: if name == 'metadata.rb': if 'cookbook' in os.path.basename(os.path.dirname(root)): cookbook_dirs.append(root) return cookbook_dirs
[ "def", "get_cookbook_dirs", "(", "self", ",", "base_dir", "=", "None", ")", ":", "if", "base_dir", "is", "None", ":", "base_dir", "=", "self", ".", "env_root", "cookbook_dirs", "=", "[", "]", "dirs_to_skip", "=", "set", "(", "[", "'.git'", "]", ")", "f...
Find cookbook directories.
[ "Find", "cookbook", "directories", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L61-L74
train
212,365
onicagroup/runway
runway/commands/runway_command.py
RunwayCommand.path_only_contains_dirs
def path_only_contains_dirs(self, path): """Return boolean on whether a path only contains directories.""" pathlistdir = os.listdir(path) if pathlistdir == []: return True if any(os.path.isfile(os.path.join(path, i)) for i in pathlistdir): return False return all(self.path_only_contains_dirs(os.path.join(path, i)) for i in pathlistdir)
python
def path_only_contains_dirs(self, path): """Return boolean on whether a path only contains directories.""" pathlistdir = os.listdir(path) if pathlistdir == []: return True if any(os.path.isfile(os.path.join(path, i)) for i in pathlistdir): return False return all(self.path_only_contains_dirs(os.path.join(path, i)) for i in pathlistdir)
[ "def", "path_only_contains_dirs", "(", "self", ",", "path", ")", ":", "pathlistdir", "=", "os", ".", "listdir", "(", "path", ")", "if", "pathlistdir", "==", "[", "]", ":", "return", "True", "if", "any", "(", "os", ".", "path", ".", "isfile", "(", "os...
Return boolean on whether a path only contains directories.
[ "Return", "boolean", "on", "whether", "a", "path", "only", "contains", "directories", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L76-L83
train
212,366
onicagroup/runway
runway/commands/runway_command.py
RunwayCommand.get_empty_dirs
def get_empty_dirs(self, path): """Return a list of empty directories in path.""" empty_dirs = [] for i in os.listdir(path): child_path = os.path.join(path, i) if i == '.git' or os.path.isfile(child_path) or os.path.islink(child_path): # noqa continue if self.path_only_contains_dirs(child_path): empty_dirs.append(i) return empty_dirs
python
def get_empty_dirs(self, path): """Return a list of empty directories in path.""" empty_dirs = [] for i in os.listdir(path): child_path = os.path.join(path, i) if i == '.git' or os.path.isfile(child_path) or os.path.islink(child_path): # noqa continue if self.path_only_contains_dirs(child_path): empty_dirs.append(i) return empty_dirs
[ "def", "get_empty_dirs", "(", "self", ",", "path", ")", ":", "empty_dirs", "=", "[", "]", "for", "i", "in", "os", ".", "listdir", "(", "path", ")", ":", "child_path", "=", "os", ".", "path", ".", "join", "(", "path", ",", "i", ")", "if", "i", "...
Return a list of empty directories in path.
[ "Return", "a", "list", "of", "empty", "directories", "in", "path", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L85-L94
train
212,367
onicagroup/runway
runway/commands/runway_command.py
RunwayCommand.parse_runway_config
def parse_runway_config(self): """Read and parse runway.yml.""" if not os.path.isfile(self.runway_config_path): LOGGER.error("Runway config file was not found (looking for " "%s)", self.runway_config_path) sys.exit(1) with open(self.runway_config_path) as data_file: return yaml.safe_load(data_file)
python
def parse_runway_config(self): """Read and parse runway.yml.""" if not os.path.isfile(self.runway_config_path): LOGGER.error("Runway config file was not found (looking for " "%s)", self.runway_config_path) sys.exit(1) with open(self.runway_config_path) as data_file: return yaml.safe_load(data_file)
[ "def", "parse_runway_config", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "self", ".", "runway_config_path", ")", ":", "LOGGER", ".", "error", "(", "\"Runway config file was not found (looking for \"", "\"%s)\"", ",", "self", ".", ...
Read and parse runway.yml.
[ "Read", "and", "parse", "runway", ".", "yml", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L96-L104
train
212,368
onicagroup/runway
runway/commands/runway_command.py
RunwayCommand.runway_config
def runway_config(self): """Return parsed runway.yml.""" if not self._runway_config: self._runway_config = self.parse_runway_config() return self._runway_config
python
def runway_config(self): """Return parsed runway.yml.""" if not self._runway_config: self._runway_config = self.parse_runway_config() return self._runway_config
[ "def", "runway_config", "(", "self", ")", ":", "if", "not", "self", ".", "_runway_config", ":", "self", ".", "_runway_config", "=", "self", ".", "parse_runway_config", "(", ")", "return", "self", ".", "_runway_config" ]
Return parsed runway.yml.
[ "Return", "parsed", "runway", ".", "yml", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway_command.py#L107-L111
train
212,369
onicagroup/runway
runway/hooks/cleanup_s3.py
purge_bucket
def purge_bucket(context, provider, **kwargs): """Delete objects in bucket.""" session = get_session(provider.region) if kwargs.get('bucket_name'): bucket_name = kwargs['bucket_name'] else: if kwargs.get('bucket_output_lookup'): value = kwargs['bucket_output_lookup'] handler = OutputLookup.handle elif kwargs.get('bucket_rxref_lookup'): value = kwargs['bucket_rxref_lookup'] handler = RxrefLookup.handle elif kwargs.get('bucket_xref_lookup'): value = kwargs['bucket_xref_lookup'] handler = XrefLookup.handle else: LOGGER.fatal('No bucket name/source provided.') return False try: # Exit early if the bucket's stack is already deleted session.client('cloudformation').describe_stacks( StackName=context.get_fqn(value.split('::')[0]) ) except ClientError as exc: if 'does not exist' in exc.response['Error']['Message']: LOGGER.info('S3 bucket stack appears to have already been ' 'deleted...') return True raise bucket_name = handler( value, provider=provider, context=context ) s3_resource = session.resource('s3') try: s3_resource.meta.client.head_bucket(Bucket=bucket_name) except ClientError as exc: if exc.response['Error']['Code'] == '404': LOGGER.info("%s S3 bucket appears to have already been deleted...", bucket_name) return True raise bucket = s3_resource.Bucket(bucket_name) bucket.object_versions.delete() return True
python
def purge_bucket(context, provider, **kwargs): """Delete objects in bucket.""" session = get_session(provider.region) if kwargs.get('bucket_name'): bucket_name = kwargs['bucket_name'] else: if kwargs.get('bucket_output_lookup'): value = kwargs['bucket_output_lookup'] handler = OutputLookup.handle elif kwargs.get('bucket_rxref_lookup'): value = kwargs['bucket_rxref_lookup'] handler = RxrefLookup.handle elif kwargs.get('bucket_xref_lookup'): value = kwargs['bucket_xref_lookup'] handler = XrefLookup.handle else: LOGGER.fatal('No bucket name/source provided.') return False try: # Exit early if the bucket's stack is already deleted session.client('cloudformation').describe_stacks( StackName=context.get_fqn(value.split('::')[0]) ) except ClientError as exc: if 'does not exist' in exc.response['Error']['Message']: LOGGER.info('S3 bucket stack appears to have already been ' 'deleted...') return True raise bucket_name = handler( value, provider=provider, context=context ) s3_resource = session.resource('s3') try: s3_resource.meta.client.head_bucket(Bucket=bucket_name) except ClientError as exc: if exc.response['Error']['Code'] == '404': LOGGER.info("%s S3 bucket appears to have already been deleted...", bucket_name) return True raise bucket = s3_resource.Bucket(bucket_name) bucket.object_versions.delete() return True
[ "def", "purge_bucket", "(", "context", ",", "provider", ",", "*", "*", "kwargs", ")", ":", "session", "=", "get_session", "(", "provider", ".", "region", ")", "if", "kwargs", ".", "get", "(", "'bucket_name'", ")", ":", "bucket_name", "=", "kwargs", "[", ...
Delete objects in bucket.
[ "Delete", "objects", "in", "bucket", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/hooks/cleanup_s3.py#L15-L64
train
212,370
onicagroup/runway
runway/commands/runway/whichenv.py
WhichEnv.execute
def execute(self): """Output environment name.""" # Disable other runway logging so the only response is the env name logging.getLogger('runway').setLevel(logging.ERROR) # This may be invoked from a module directory in an environment; # account for that here if necessary if not os.path.isfile('runway.yml'): self.env_root = os.path.dirname(os.getcwd()) self.runway_config_path = os.path.join(self.env_root, 'runway.yml') print(get_env( self.env_root, self.runway_config.get('ignore_git_branch', False) ))
python
def execute(self): """Output environment name.""" # Disable other runway logging so the only response is the env name logging.getLogger('runway').setLevel(logging.ERROR) # This may be invoked from a module directory in an environment; # account for that here if necessary if not os.path.isfile('runway.yml'): self.env_root = os.path.dirname(os.getcwd()) self.runway_config_path = os.path.join(self.env_root, 'runway.yml') print(get_env( self.env_root, self.runway_config.get('ignore_git_branch', False) ))
[ "def", "execute", "(", "self", ")", ":", "# Disable other runway logging so the only response is the env name", "logging", ".", "getLogger", "(", "'runway'", ")", ".", "setLevel", "(", "logging", ".", "ERROR", ")", "# This may be invoked from a module directory in an environm...
Output environment name.
[ "Output", "environment", "name", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway/whichenv.py#L13-L27
train
212,371
onicagroup/runway
quickstarts/conduit/update_env_endpoint.py
update_api_endpoint
def update_api_endpoint(): """Update app environment file with backend endpoint.""" environment = subprocess.check_output(['pipenv', 'run', 'runway', 'whichenv']).decode().strip() environment_file = os.path.join( os.path.dirname(os.path.realpath(__file__)), 'src', 'environments', 'environment.prod.ts' if environment == 'prod' else 'environment.ts' ) cloudformation = boto3.resource('cloudformation') stack = cloudformation.Stack(STACK_PREFIX + environment) endpoint = [i['OutputValue'] for i in stack.outputs if i['OutputKey'] == 'ServiceEndpoint'][0] with open(environment_file, 'r') as stream: content = stream.read() content = re.sub(r'api_url: \'.*\'$', "api_url: '%s/api'" % endpoint, content, flags=re.M) with open(environment_file, 'w') as stream: stream.write(content)
python
def update_api_endpoint(): """Update app environment file with backend endpoint.""" environment = subprocess.check_output(['pipenv', 'run', 'runway', 'whichenv']).decode().strip() environment_file = os.path.join( os.path.dirname(os.path.realpath(__file__)), 'src', 'environments', 'environment.prod.ts' if environment == 'prod' else 'environment.ts' ) cloudformation = boto3.resource('cloudformation') stack = cloudformation.Stack(STACK_PREFIX + environment) endpoint = [i['OutputValue'] for i in stack.outputs if i['OutputKey'] == 'ServiceEndpoint'][0] with open(environment_file, 'r') as stream: content = stream.read() content = re.sub(r'api_url: \'.*\'$', "api_url: '%s/api'" % endpoint, content, flags=re.M) with open(environment_file, 'w') as stream: stream.write(content)
[ "def", "update_api_endpoint", "(", ")", ":", "environment", "=", "subprocess", ".", "check_output", "(", "[", "'pipenv'", ",", "'run'", ",", "'runway'", ",", "'whichenv'", "]", ")", ".", "decode", "(", ")", ".", "strip", "(", ")", "environment_file", "=", ...
Update app environment file with backend endpoint.
[ "Update", "app", "environment", "file", "with", "backend", "endpoint", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/quickstarts/conduit/update_env_endpoint.py#L11-L35
train
212,372
onicagroup/runway
runway/util.py
change_dir
def change_dir(newdir): """Change directory. Adapted from http://stackoverflow.com/a/24176022 """ prevdir = os.getcwd() os.chdir(os.path.expanduser(newdir)) try: yield finally: os.chdir(prevdir)
python
def change_dir(newdir): """Change directory. Adapted from http://stackoverflow.com/a/24176022 """ prevdir = os.getcwd() os.chdir(os.path.expanduser(newdir)) try: yield finally: os.chdir(prevdir)
[ "def", "change_dir", "(", "newdir", ")", ":", "prevdir", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "os", ".", "path", ".", "expanduser", "(", "newdir", ")", ")", "try", ":", "yield", "finally", ":", "os", ".", "chdir", "(", "pr...
Change directory. Adapted from http://stackoverflow.com/a/24176022
[ "Change", "directory", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L24-L34
train
212,373
onicagroup/runway
runway/util.py
ensure_file_is_executable
def ensure_file_is_executable(path): """Exit if file is not executable.""" if platform.system() != 'Windows' and ( not stat.S_IXUSR & os.stat(path)[stat.ST_MODE]): print("Error: File %s is not executable" % path) sys.exit(1)
python
def ensure_file_is_executable(path): """Exit if file is not executable.""" if platform.system() != 'Windows' and ( not stat.S_IXUSR & os.stat(path)[stat.ST_MODE]): print("Error: File %s is not executable" % path) sys.exit(1)
[ "def", "ensure_file_is_executable", "(", "path", ")", ":", "if", "platform", ".", "system", "(", ")", "!=", "'Windows'", "and", "(", "not", "stat", ".", "S_IXUSR", "&", "os", ".", "stat", "(", "path", ")", "[", "stat", ".", "ST_MODE", "]", ")", ":", ...
Exit if file is not executable.
[ "Exit", "if", "file", "is", "not", "executable", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L37-L42
train
212,374
onicagroup/runway
runway/util.py
merge_dicts
def merge_dicts(dict1, dict2, deep_merge=True): """Merge dict2 into dict1.""" if deep_merge: if isinstance(dict1, list) and isinstance(dict2, list): return dict1 + dict2 if not isinstance(dict1, dict) or not isinstance(dict2, dict): return dict2 for key in dict2: dict1[key] = merge_dicts(dict1[key], dict2[key]) if key in dict1 else dict2[key] # noqa pylint: disable=line-too-long return dict1 dict3 = dict1.copy() dict3.update(dict2) return dict3
python
def merge_dicts(dict1, dict2, deep_merge=True): """Merge dict2 into dict1.""" if deep_merge: if isinstance(dict1, list) and isinstance(dict2, list): return dict1 + dict2 if not isinstance(dict1, dict) or not isinstance(dict2, dict): return dict2 for key in dict2: dict1[key] = merge_dicts(dict1[key], dict2[key]) if key in dict1 else dict2[key] # noqa pylint: disable=line-too-long return dict1 dict3 = dict1.copy() dict3.update(dict2) return dict3
[ "def", "merge_dicts", "(", "dict1", ",", "dict2", ",", "deep_merge", "=", "True", ")", ":", "if", "deep_merge", ":", "if", "isinstance", "(", "dict1", ",", "list", ")", "and", "isinstance", "(", "dict2", ",", "list", ")", ":", "return", "dict1", "+", ...
Merge dict2 into dict1.
[ "Merge", "dict2", "into", "dict1", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L64-L78
train
212,375
onicagroup/runway
runway/util.py
extract_boto_args_from_env
def extract_boto_args_from_env(env_vars): """Return boto3 client args dict with environment creds.""" boto_args = {} for i in ['aws_access_key_id', 'aws_secret_access_key', 'aws_session_token']: if env_vars.get(i.upper()): boto_args[i] = env_vars[i.upper()] return boto_args
python
def extract_boto_args_from_env(env_vars): """Return boto3 client args dict with environment creds.""" boto_args = {} for i in ['aws_access_key_id', 'aws_secret_access_key', 'aws_session_token']: if env_vars.get(i.upper()): boto_args[i] = env_vars[i.upper()] return boto_args
[ "def", "extract_boto_args_from_env", "(", "env_vars", ")", ":", "boto_args", "=", "{", "}", "for", "i", "in", "[", "'aws_access_key_id'", ",", "'aws_secret_access_key'", ",", "'aws_session_token'", "]", ":", "if", "env_vars", ".", "get", "(", "i", ".", "upper"...
Return boto3 client args dict with environment creds.
[ "Return", "boto3", "client", "args", "dict", "with", "environment", "creds", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L85-L92
train
212,376
onicagroup/runway
runway/util.py
flatten_path_lists
def flatten_path_lists(env_dict, env_root=None): """Join paths in environment dict down to strings.""" for (key, val) in env_dict.items(): # Lists are presumed to be path components and will be turned back # to strings if isinstance(val, list): env_dict[key] = os.path.join(env_root, os.path.join(*val)) if (env_root and not os.path.isabs(os.path.join(*val))) else os.path.join(*val) # noqa pylint: disable=line-too-long return env_dict
python
def flatten_path_lists(env_dict, env_root=None): """Join paths in environment dict down to strings.""" for (key, val) in env_dict.items(): # Lists are presumed to be path components and will be turned back # to strings if isinstance(val, list): env_dict[key] = os.path.join(env_root, os.path.join(*val)) if (env_root and not os.path.isabs(os.path.join(*val))) else os.path.join(*val) # noqa pylint: disable=line-too-long return env_dict
[ "def", "flatten_path_lists", "(", "env_dict", ",", "env_root", "=", "None", ")", ":", "for", "(", "key", ",", "val", ")", "in", "env_dict", ".", "items", "(", ")", ":", "# Lists are presumed to be path components and will be turned back", "# to strings", "if", "is...
Join paths in environment dict down to strings.
[ "Join", "paths", "in", "environment", "dict", "down", "to", "strings", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L95-L102
train
212,377
onicagroup/runway
runway/util.py
merge_nested_environment_dicts
def merge_nested_environment_dicts(env_dicts, env_name=None, env_root=None): """Return single-level dictionary from dictionary of dictionaries.""" # If the provided dictionary is just a single "level" (no nested # environments), it applies to all environments if all(isinstance(val, (six.string_types, list)) for (_key, val) in env_dicts.items()): return flatten_path_lists(env_dicts, env_root) if env_name is None: if env_dicts.get('*'): return flatten_path_lists(env_dicts.get('*'), env_root) raise AttributeError("Provided config key:val pairs %s aren't usable with no environment provided" % env_dicts) # noqa pylint: disable=line-too-long if not env_dicts.get('*') and not env_dicts.get(env_name): raise AttributeError("Provided config key:val pairs %s aren't usable with environment %s" % (env_dicts, env_name)) # noqa pylint: disable=line-too-long combined_dicts = merge_dicts(env_dicts.get('*', {}), env_dicts.get(env_name, {})) return flatten_path_lists(combined_dicts, env_root)
python
def merge_nested_environment_dicts(env_dicts, env_name=None, env_root=None): """Return single-level dictionary from dictionary of dictionaries.""" # If the provided dictionary is just a single "level" (no nested # environments), it applies to all environments if all(isinstance(val, (six.string_types, list)) for (_key, val) in env_dicts.items()): return flatten_path_lists(env_dicts, env_root) if env_name is None: if env_dicts.get('*'): return flatten_path_lists(env_dicts.get('*'), env_root) raise AttributeError("Provided config key:val pairs %s aren't usable with no environment provided" % env_dicts) # noqa pylint: disable=line-too-long if not env_dicts.get('*') and not env_dicts.get(env_name): raise AttributeError("Provided config key:val pairs %s aren't usable with environment %s" % (env_dicts, env_name)) # noqa pylint: disable=line-too-long combined_dicts = merge_dicts(env_dicts.get('*', {}), env_dicts.get(env_name, {})) return flatten_path_lists(combined_dicts, env_root)
[ "def", "merge_nested_environment_dicts", "(", "env_dicts", ",", "env_name", "=", "None", ",", "env_root", "=", "None", ")", ":", "# If the provided dictionary is just a single \"level\" (no nested", "# environments), it applies to all environments", "if", "all", "(", "isinstanc...
Return single-level dictionary from dictionary of dictionaries.
[ "Return", "single", "-", "level", "dictionary", "from", "dictionary", "of", "dictionaries", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L105-L123
train
212,378
onicagroup/runway
runway/util.py
get_embedded_lib_path
def get_embedded_lib_path(): """Return path of embedded libraries.""" return os.path.join( os.path.dirname(os.path.abspath(__file__)), 'embedded' )
python
def get_embedded_lib_path(): """Return path of embedded libraries.""" return os.path.join( os.path.dirname(os.path.abspath(__file__)), 'embedded' )
[ "def", "get_embedded_lib_path", "(", ")", ":", "return", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ",", "'embedded'", ")" ]
Return path of embedded libraries.
[ "Return", "path", "of", "embedded", "libraries", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L134-L139
train
212,379
onicagroup/runway
runway/util.py
get_hash_for_filename
def get_hash_for_filename(filename, hashfile_path): """Return hash for filename in the hashfile.""" filehash = '' with open(hashfile_path, 'r') as stream: for _cnt, line in enumerate(stream): if line.rstrip().endswith(filename): filehash = re.match(r'^[A-Za-z0-9]*', line).group(0) break if filehash: return filehash raise AttributeError("Filename %s not found in hash file" % filename)
python
def get_hash_for_filename(filename, hashfile_path): """Return hash for filename in the hashfile.""" filehash = '' with open(hashfile_path, 'r') as stream: for _cnt, line in enumerate(stream): if line.rstrip().endswith(filename): filehash = re.match(r'^[A-Za-z0-9]*', line).group(0) break if filehash: return filehash raise AttributeError("Filename %s not found in hash file" % filename)
[ "def", "get_hash_for_filename", "(", "filename", ",", "hashfile_path", ")", ":", "filehash", "=", "''", "with", "open", "(", "hashfile_path", ",", "'r'", ")", "as", "stream", ":", "for", "_cnt", ",", "line", "in", "enumerate", "(", "stream", ")", ":", "i...
Return hash for filename in the hashfile.
[ "Return", "hash", "for", "filename", "in", "the", "hashfile", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L142-L152
train
212,380
onicagroup/runway
runway/util.py
fix_windows_command_list
def fix_windows_command_list(commands): # type: (List[str]) -> List[str] """Return command list with working Windows commands. npm on windows is npm.cmd, which will blow up subprocess.check_call(['npm', '...']) Similar issues arise when calling python apps like pipenv that will have a windows-only suffix applied to them """ fully_qualified_cmd_path = which(commands[0]) if fully_qualified_cmd_path and ( not which(commands[0], add_win_suffixes=False)): commands[0] = os.path.basename(fully_qualified_cmd_path) return commands
python
def fix_windows_command_list(commands): # type: (List[str]) -> List[str] """Return command list with working Windows commands. npm on windows is npm.cmd, which will blow up subprocess.check_call(['npm', '...']) Similar issues arise when calling python apps like pipenv that will have a windows-only suffix applied to them """ fully_qualified_cmd_path = which(commands[0]) if fully_qualified_cmd_path and ( not which(commands[0], add_win_suffixes=False)): commands[0] = os.path.basename(fully_qualified_cmd_path) return commands
[ "def", "fix_windows_command_list", "(", "commands", ")", ":", "# type: (List[str]) -> List[str]", "fully_qualified_cmd_path", "=", "which", "(", "commands", "[", "0", "]", ")", "if", "fully_qualified_cmd_path", "and", "(", "not", "which", "(", "commands", "[", "0", ...
Return command list with working Windows commands. npm on windows is npm.cmd, which will blow up subprocess.check_call(['npm', '...']) Similar issues arise when calling python apps like pipenv that will have a windows-only suffix applied to them
[ "Return", "command", "list", "with", "working", "Windows", "commands", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L165-L179
train
212,381
onicagroup/runway
runway/util.py
run_commands
def run_commands(commands, # type: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] directory, # type: str env=None # type: Optional[Dict[str, Union[str, int]]] ): # noqa # type: (...) -> None """Run list of commands.""" if env is None: env = os.environ.copy() for step in commands: if isinstance(step, (list, six.string_types)): execution_dir = directory raw_command = step elif step.get('command'): # dictionary execution_dir = os.path.join(directory, step.get('cwd')) if step.get('cwd') else directory # noqa pylint: disable=line-too-long raw_command = step['command'] else: raise AttributeError("Invalid command step: %s" % step) command_list = raw_command.split(' ') if isinstance(raw_command, six.string_types) else raw_command # noqa pylint: disable=line-too-long if platform.system().lower() == 'windows': command_list = fix_windows_command_list(command_list) with change_dir(execution_dir): check_call(command_list, env=env)
python
def run_commands(commands, # type: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]] directory, # type: str env=None # type: Optional[Dict[str, Union[str, int]]] ): # noqa # type: (...) -> None """Run list of commands.""" if env is None: env = os.environ.copy() for step in commands: if isinstance(step, (list, six.string_types)): execution_dir = directory raw_command = step elif step.get('command'): # dictionary execution_dir = os.path.join(directory, step.get('cwd')) if step.get('cwd') else directory # noqa pylint: disable=line-too-long raw_command = step['command'] else: raise AttributeError("Invalid command step: %s" % step) command_list = raw_command.split(' ') if isinstance(raw_command, six.string_types) else raw_command # noqa pylint: disable=line-too-long if platform.system().lower() == 'windows': command_list = fix_windows_command_list(command_list) with change_dir(execution_dir): check_call(command_list, env=env)
[ "def", "run_commands", "(", "commands", ",", "# type: List[Union[str, List[str], Dict[str, Union[str, List[str]]]]]", "directory", ",", "# type: str", "env", "=", "None", "# type: Optional[Dict[str, Union[str, int]]]", ")", ":", "# noqa", "# type: (...) -> None", "if", "env", "...
Run list of commands.
[ "Run", "list", "of", "commands", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L182-L205
train
212,382
onicagroup/runway
runway/util.py
sha256sum
def sha256sum(filename): """Return SHA256 hash of file.""" sha256 = hashlib.sha256() mem_view = memoryview(bytearray(128*1024)) with open(filename, 'rb', buffering=0) as stream: for i in iter(lambda: stream.readinto(mem_view), 0): sha256.update(mem_view[:i]) return sha256.hexdigest()
python
def sha256sum(filename): """Return SHA256 hash of file.""" sha256 = hashlib.sha256() mem_view = memoryview(bytearray(128*1024)) with open(filename, 'rb', buffering=0) as stream: for i in iter(lambda: stream.readinto(mem_view), 0): sha256.update(mem_view[:i]) return sha256.hexdigest()
[ "def", "sha256sum", "(", "filename", ")", ":", "sha256", "=", "hashlib", ".", "sha256", "(", ")", "mem_view", "=", "memoryview", "(", "bytearray", "(", "128", "*", "1024", ")", ")", "with", "open", "(", "filename", ",", "'rb'", ",", "buffering", "=", ...
Return SHA256 hash of file.
[ "Return", "SHA256", "hash", "of", "file", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L208-L215
train
212,383
onicagroup/runway
runway/util.py
use_embedded_pkgs
def use_embedded_pkgs(embedded_lib_path=None): """Temporarily prepend embedded packages to sys.path.""" if embedded_lib_path is None: embedded_lib_path = get_embedded_lib_path() old_sys_path = list(sys.path) sys.path.insert( 1, # https://stackoverflow.com/a/10097543 embedded_lib_path ) try: yield finally: sys.path = old_sys_path
python
def use_embedded_pkgs(embedded_lib_path=None): """Temporarily prepend embedded packages to sys.path.""" if embedded_lib_path is None: embedded_lib_path = get_embedded_lib_path() old_sys_path = list(sys.path) sys.path.insert( 1, # https://stackoverflow.com/a/10097543 embedded_lib_path ) try: yield finally: sys.path = old_sys_path
[ "def", "use_embedded_pkgs", "(", "embedded_lib_path", "=", "None", ")", ":", "if", "embedded_lib_path", "is", "None", ":", "embedded_lib_path", "=", "get_embedded_lib_path", "(", ")", "old_sys_path", "=", "list", "(", "sys", ".", "path", ")", "sys", ".", "path...
Temporarily prepend embedded packages to sys.path.
[ "Temporarily", "prepend", "embedded", "packages", "to", "sys", ".", "path", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L219-L232
train
212,384
onicagroup/runway
runway/util.py
which
def which(program, add_win_suffixes=True): """Mimic 'which' command behavior. Adapted from https://stackoverflow.com/a/377028 """ def is_exe(fpath): """Determine if program exists and is executable.""" return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(program) if add_win_suffixes and platform.system().lower() == 'windows' and not ( fname.endswith('.exe') or fname.endswith('.cmd')): fnames = [fname + '.exe', fname + '.cmd'] else: fnames = [fname] for i in fnames: if fpath: exe_file = os.path.join(fpath, i) if is_exe(exe_file): return exe_file else: for path in os.environ['PATH'].split(os.pathsep): exe_file = os.path.join(path, i) if is_exe(exe_file): return exe_file return None
python
def which(program, add_win_suffixes=True): """Mimic 'which' command behavior. Adapted from https://stackoverflow.com/a/377028 """ def is_exe(fpath): """Determine if program exists and is executable.""" return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(program) if add_win_suffixes and platform.system().lower() == 'windows' and not ( fname.endswith('.exe') or fname.endswith('.cmd')): fnames = [fname + '.exe', fname + '.cmd'] else: fnames = [fname] for i in fnames: if fpath: exe_file = os.path.join(fpath, i) if is_exe(exe_file): return exe_file else: for path in os.environ['PATH'].split(os.pathsep): exe_file = os.path.join(path, i) if is_exe(exe_file): return exe_file return None
[ "def", "which", "(", "program", ",", "add_win_suffixes", "=", "True", ")", ":", "def", "is_exe", "(", "fpath", ")", ":", "\"\"\"Determine if program exists and is executable.\"\"\"", "return", "os", ".", "path", ".", "isfile", "(", "fpath", ")", "and", "os", "...
Mimic 'which' command behavior. Adapted from https://stackoverflow.com/a/377028
[ "Mimic", "which", "command", "behavior", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/util.py#L235-L262
train
212,385
onicagroup/runway
runway/module/terraform.py
create_config_backend_options
def create_config_backend_options(module_opts, env_name, env_vars): """Return backend options defined in module options.""" backend_opts = {} if module_opts.get('terraform_backend_config'): backend_opts['config'] = merge_nested_environment_dicts( module_opts.get('terraform_backend_config'), env_name ) if module_opts.get('terraform_backend_cfn_outputs'): if not backend_opts.get('config'): backend_opts['config'] = {} if not backend_opts['config'].get('region'): backend_opts['config']['region'] = env_vars['AWS_DEFAULT_REGION'] boto_args = extract_boto_args_from_env(env_vars) cfn_client = boto3.client( 'cloudformation', region_name=backend_opts['config']['region'], **boto_args ) for (key, val) in merge_nested_environment_dicts(module_opts.get('terraform_backend_cfn_outputs'), # noqa pylint: disable=line-too-long env_name).items(): backend_opts['config'][key] = find_cfn_output( val.split('::')[1], cfn_client.describe_stacks( StackName=val.split('::')[0] )['Stacks'][0]['Outputs'] ) return backend_opts
python
def create_config_backend_options(module_opts, env_name, env_vars): """Return backend options defined in module options.""" backend_opts = {} if module_opts.get('terraform_backend_config'): backend_opts['config'] = merge_nested_environment_dicts( module_opts.get('terraform_backend_config'), env_name ) if module_opts.get('terraform_backend_cfn_outputs'): if not backend_opts.get('config'): backend_opts['config'] = {} if not backend_opts['config'].get('region'): backend_opts['config']['region'] = env_vars['AWS_DEFAULT_REGION'] boto_args = extract_boto_args_from_env(env_vars) cfn_client = boto3.client( 'cloudformation', region_name=backend_opts['config']['region'], **boto_args ) for (key, val) in merge_nested_environment_dicts(module_opts.get('terraform_backend_cfn_outputs'), # noqa pylint: disable=line-too-long env_name).items(): backend_opts['config'][key] = find_cfn_output( val.split('::')[1], cfn_client.describe_stacks( StackName=val.split('::')[0] )['Stacks'][0]['Outputs'] ) return backend_opts
[ "def", "create_config_backend_options", "(", "module_opts", ",", "env_name", ",", "env_vars", ")", ":", "backend_opts", "=", "{", "}", "if", "module_opts", ".", "get", "(", "'terraform_backend_config'", ")", ":", "backend_opts", "[", "'config'", "]", "=", "merge...
Return backend options defined in module options.
[ "Return", "backend", "options", "defined", "in", "module", "options", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/module/terraform.py#L25-L55
train
212,386
onicagroup/runway
runway/module/terraform.py
get_backend_init_list
def get_backend_init_list(backend_vals): """Turn backend config dict into command line items.""" cmd_list = [] for (key, val) in backend_vals.items(): cmd_list.append('-backend-config') cmd_list.append(key + '=' + val) return cmd_list
python
def get_backend_init_list(backend_vals): """Turn backend config dict into command line items.""" cmd_list = [] for (key, val) in backend_vals.items(): cmd_list.append('-backend-config') cmd_list.append(key + '=' + val) return cmd_list
[ "def", "get_backend_init_list", "(", "backend_vals", ")", ":", "cmd_list", "=", "[", "]", "for", "(", "key", ",", "val", ")", "in", "backend_vals", ".", "items", "(", ")", ":", "cmd_list", ".", "append", "(", "'-backend-config'", ")", "cmd_list", ".", "a...
Turn backend config dict into command line items.
[ "Turn", "backend", "config", "dict", "into", "command", "line", "items", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/module/terraform.py#L58-L64
train
212,387
onicagroup/runway
runway/module/terraform.py
get_backend_tfvars_file
def get_backend_tfvars_file(path, environment, region): """Determine Terraform backend file.""" backend_filenames = gen_backend_tfvars_files(environment, region) for name in backend_filenames: if os.path.isfile(os.path.join(path, name)): return name return backend_filenames[-1]
python
def get_backend_tfvars_file(path, environment, region): """Determine Terraform backend file.""" backend_filenames = gen_backend_tfvars_files(environment, region) for name in backend_filenames: if os.path.isfile(os.path.join(path, name)): return name return backend_filenames[-1]
[ "def", "get_backend_tfvars_file", "(", "path", ",", "environment", ",", "region", ")", ":", "backend_filenames", "=", "gen_backend_tfvars_files", "(", "environment", ",", "region", ")", "for", "name", "in", "backend_filenames", ":", "if", "os", ".", "path", ".",...
Determine Terraform backend file.
[ "Determine", "Terraform", "backend", "file", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/module/terraform.py#L77-L83
train
212,388
onicagroup/runway
runway/module/terraform.py
get_module_defined_tf_var
def get_module_defined_tf_var(terraform_version_opts, env_name): """Return version of Terraform requested in module options.""" if isinstance(terraform_version_opts, six.string_types): return terraform_version_opts if terraform_version_opts.get(env_name): return terraform_version_opts.get(env_name) if terraform_version_opts.get('*'): return terraform_version_opts.get('*') return None
python
def get_module_defined_tf_var(terraform_version_opts, env_name): """Return version of Terraform requested in module options.""" if isinstance(terraform_version_opts, six.string_types): return terraform_version_opts if terraform_version_opts.get(env_name): return terraform_version_opts.get(env_name) if terraform_version_opts.get('*'): return terraform_version_opts.get('*') return None
[ "def", "get_module_defined_tf_var", "(", "terraform_version_opts", ",", "env_name", ")", ":", "if", "isinstance", "(", "terraform_version_opts", ",", "six", ".", "string_types", ")", ":", "return", "terraform_version_opts", "if", "terraform_version_opts", ".", "get", ...
Return version of Terraform requested in module options.
[ "Return", "version", "of", "Terraform", "requested", "in", "module", "options", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/module/terraform.py#L86-L94
train
212,389
onicagroup/runway
runway/module/terraform.py
get_workspace_tfvars_file
def get_workspace_tfvars_file(path, environment, region): """Determine Terraform workspace-specific tfvars file name.""" for name in gen_workspace_tfvars_files(environment, region): if os.path.isfile(os.path.join(path, name)): return name return "%s.tfvars" % environment
python
def get_workspace_tfvars_file(path, environment, region): """Determine Terraform workspace-specific tfvars file name.""" for name in gen_workspace_tfvars_files(environment, region): if os.path.isfile(os.path.join(path, name)): return name return "%s.tfvars" % environment
[ "def", "get_workspace_tfvars_file", "(", "path", ",", "environment", ",", "region", ")", ":", "for", "name", "in", "gen_workspace_tfvars_files", "(", "environment", ",", "region", ")", ":", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", "....
Determine Terraform workspace-specific tfvars file name.
[ "Determine", "Terraform", "workspace", "-", "specific", "tfvars", "file", "name", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/module/terraform.py#L107-L112
train
212,390
onicagroup/runway
runway/module/terraform.py
reinit_on_backend_changes
def reinit_on_backend_changes(tf_bin, # pylint: disable=too-many-arguments module_path, backend_options, env_name, env_region, env_vars): """Clean terraform directory and run init if necessary. If deploying a TF module to multiple regions (or any scenario requiring multiple backend configs), switching the backend will cause TF to compare the old and new backends. This will frequently cause an access error as the creds/role for the new backend won't always have access to the old one. This method compares the defined & initialized backend configs and trashes the terraform directory & re-inits if they're out of sync. """ terraform_dir = os.path.join(module_path, '.terraform') local_tfstate_path = os.path.join(terraform_dir, 'terraform.tfstate') current_backend_config = {} desired_backend_config = {} LOGGER.debug('Comparing previous & desired Terraform backend configs') if os.path.isfile(local_tfstate_path): with open(local_tfstate_path, 'r') as stream: current_backend_config = hcl.load(stream).get('backend', {}).get('config', {}) if backend_options.get('config'): desired_backend_config = backend_options.get('config') elif os.path.isfile(os.path.join(module_path, backend_options.get('filename'))): with open(os.path.join(module_path, backend_options.get('filename')), 'r') as stream: desired_backend_config = hcl.load(stream) # Can't solely rely on the backend info defined in runway options or # backend files; merge in the values defined in main.tf # (or whatever tf file) for filename in ['main.tf'] + glob.glob(os.path.join(module_path, '*.tf')): if os.path.isfile(filename): with open(filename, 'r') as stream: tf_config = hcl.load(stream) if tf_config.get('terraform', {}).get('backend'): [(_s3key, tffile_backend_config)] = tf_config['terraform']['backend'].items() # noqa pylint: disable=line-too-long desired_backend_config = merge_dicts( desired_backend_config, tffile_backend_config ) break if current_backend_config != desired_backend_config: LOGGER.info("Desired and 
previously initialized TF backend config is " "out of sync; trashing local TF state directory %s", terraform_dir) send2trash(terraform_dir) run_terraform_init( tf_bin=tf_bin, module_path=module_path, backend_options=backend_options, env_name=env_name, env_region=env_region, env_vars=env_vars )
python
def reinit_on_backend_changes(tf_bin, # pylint: disable=too-many-arguments module_path, backend_options, env_name, env_region, env_vars): """Clean terraform directory and run init if necessary. If deploying a TF module to multiple regions (or any scenario requiring multiple backend configs), switching the backend will cause TF to compare the old and new backends. This will frequently cause an access error as the creds/role for the new backend won't always have access to the old one. This method compares the defined & initialized backend configs and trashes the terraform directory & re-inits if they're out of sync. """ terraform_dir = os.path.join(module_path, '.terraform') local_tfstate_path = os.path.join(terraform_dir, 'terraform.tfstate') current_backend_config = {} desired_backend_config = {} LOGGER.debug('Comparing previous & desired Terraform backend configs') if os.path.isfile(local_tfstate_path): with open(local_tfstate_path, 'r') as stream: current_backend_config = hcl.load(stream).get('backend', {}).get('config', {}) if backend_options.get('config'): desired_backend_config = backend_options.get('config') elif os.path.isfile(os.path.join(module_path, backend_options.get('filename'))): with open(os.path.join(module_path, backend_options.get('filename')), 'r') as stream: desired_backend_config = hcl.load(stream) # Can't solely rely on the backend info defined in runway options or # backend files; merge in the values defined in main.tf # (or whatever tf file) for filename in ['main.tf'] + glob.glob(os.path.join(module_path, '*.tf')): if os.path.isfile(filename): with open(filename, 'r') as stream: tf_config = hcl.load(stream) if tf_config.get('terraform', {}).get('backend'): [(_s3key, tffile_backend_config)] = tf_config['terraform']['backend'].items() # noqa pylint: disable=line-too-long desired_backend_config = merge_dicts( desired_backend_config, tffile_backend_config ) break if current_backend_config != desired_backend_config: LOGGER.info("Desired and 
previously initialized TF backend config is " "out of sync; trashing local TF state directory %s", terraform_dir) send2trash(terraform_dir) run_terraform_init( tf_bin=tf_bin, module_path=module_path, backend_options=backend_options, env_name=env_name, env_region=env_region, env_vars=env_vars )
[ "def", "reinit_on_backend_changes", "(", "tf_bin", ",", "# pylint: disable=too-many-arguments", "module_path", ",", "backend_options", ",", "env_name", ",", "env_region", ",", "env_vars", ")", ":", "terraform_dir", "=", "os", ".", "path", ".", "join", "(", "module_p...
Clean terraform directory and run init if necessary. If deploying a TF module to multiple regions (or any scenario requiring multiple backend configs), switching the backend will cause TF to compare the old and new backends. This will frequently cause an access error as the creds/role for the new backend won't always have access to the old one. This method compares the defined & initialized backend configs and trashes the terraform directory & re-inits if they're out of sync.
[ "Clean", "terraform", "directory", "and", "run", "init", "if", "necessary", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/module/terraform.py#L115-L177
train
212,391
onicagroup/runway
runway/module/terraform.py
run_terraform_init
def run_terraform_init(tf_bin, # pylint: disable=too-many-arguments module_path, backend_options, env_name, env_region, env_vars): """Run Terraform init.""" init_cmd = [tf_bin, 'init'] cmd_opts = {'env_vars': env_vars, 'exit_on_error': False} if backend_options.get('config'): LOGGER.info('Using provided backend values "%s"', str(backend_options.get('config'))) cmd_opts['cmd_list'] = init_cmd + get_backend_init_list(backend_options.get('config')) # noqa pylint: disable=line-too-long elif os.path.isfile(os.path.join(module_path, backend_options.get('filename'))): LOGGER.info('Using backend config file %s', backend_options.get('filename')) cmd_opts['cmd_list'] = init_cmd + ['-backend-config=%s' % backend_options.get('filename')] # noqa pylint: disable=line-too-long else: LOGGER.info( "No backend tfvars file found -- looking for one " "of \"%s\" (proceeding with bare 'terraform " "init')", ', '.join(gen_backend_tfvars_files( env_name, env_region))) cmd_opts['cmd_list'] = init_cmd try: run_module_command(**cmd_opts) except subprocess.CalledProcessError as shelloutexc: # An error during initialization can leave things in an inconsistent # state (e.g. backend configured but no providers downloaded). Marking # this with a file so it will be deleted on the next run. if os.path.isdir(os.path.join(module_path, '.terraform')): with open(os.path.join(module_path, '.terraform', FAILED_INIT_FILENAME), 'w') as stream: stream.write('1') sys.exit(shelloutexc.returncode)
python
def run_terraform_init(tf_bin, # pylint: disable=too-many-arguments module_path, backend_options, env_name, env_region, env_vars): """Run Terraform init.""" init_cmd = [tf_bin, 'init'] cmd_opts = {'env_vars': env_vars, 'exit_on_error': False} if backend_options.get('config'): LOGGER.info('Using provided backend values "%s"', str(backend_options.get('config'))) cmd_opts['cmd_list'] = init_cmd + get_backend_init_list(backend_options.get('config')) # noqa pylint: disable=line-too-long elif os.path.isfile(os.path.join(module_path, backend_options.get('filename'))): LOGGER.info('Using backend config file %s', backend_options.get('filename')) cmd_opts['cmd_list'] = init_cmd + ['-backend-config=%s' % backend_options.get('filename')] # noqa pylint: disable=line-too-long else: LOGGER.info( "No backend tfvars file found -- looking for one " "of \"%s\" (proceeding with bare 'terraform " "init')", ', '.join(gen_backend_tfvars_files( env_name, env_region))) cmd_opts['cmd_list'] = init_cmd try: run_module_command(**cmd_opts) except subprocess.CalledProcessError as shelloutexc: # An error during initialization can leave things in an inconsistent # state (e.g. backend configured but no providers downloaded). Marking # this with a file so it will be deleted on the next run. if os.path.isdir(os.path.join(module_path, '.terraform')): with open(os.path.join(module_path, '.terraform', FAILED_INIT_FILENAME), 'w') as stream: stream.write('1') sys.exit(shelloutexc.returncode)
[ "def", "run_terraform_init", "(", "tf_bin", ",", "# pylint: disable=too-many-arguments", "module_path", ",", "backend_options", ",", "env_name", ",", "env_region", ",", "env_vars", ")", ":", "init_cmd", "=", "[", "tf_bin", ",", "'init'", "]", "cmd_opts", "=", "{",...
Run Terraform init.
[ "Run", "Terraform", "init", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/module/terraform.py#L180-L217
train
212,392
onicagroup/runway
runway/commands/command_loader.py
find_command_class
def find_command_class(possible_command_names): """Try to find a class for one of the given command names.""" for command_name in possible_command_names: if hasattr(ALL_COMMANDS_MODULE, command_name): command_module = getattr(ALL_COMMANDS_MODULE, command_name) command_class_hierarchy = getmembers(command_module, isclass) command_class_tuple = list(filter(_not_base_class, command_class_hierarchy))[0] return command_class_tuple[1] return None
python
def find_command_class(possible_command_names): """Try to find a class for one of the given command names.""" for command_name in possible_command_names: if hasattr(ALL_COMMANDS_MODULE, command_name): command_module = getattr(ALL_COMMANDS_MODULE, command_name) command_class_hierarchy = getmembers(command_module, isclass) command_class_tuple = list(filter(_not_base_class, command_class_hierarchy))[0] return command_class_tuple[1] return None
[ "def", "find_command_class", "(", "possible_command_names", ")", ":", "for", "command_name", "in", "possible_command_names", ":", "if", "hasattr", "(", "ALL_COMMANDS_MODULE", ",", "command_name", ")", ":", "command_module", "=", "getattr", "(", "ALL_COMMANDS_MODULE", ...
Try to find a class for one of the given command names.
[ "Try", "to", "find", "a", "class", "for", "one", "of", "the", "given", "command", "names", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/command_loader.py#L10-L18
train
212,393
onicagroup/runway
runway/commands/runway/gen_sample.py
generate_sample_module
def generate_sample_module(module_dir): """Generate skeleton sample module.""" if os.path.isdir(module_dir): LOGGER.error("Error generating sample module -- directory %s " "already exists!", module_dir) sys.exit(1) os.mkdir(module_dir)
python
def generate_sample_module(module_dir): """Generate skeleton sample module.""" if os.path.isdir(module_dir): LOGGER.error("Error generating sample module -- directory %s " "already exists!", module_dir) sys.exit(1) os.mkdir(module_dir)
[ "def", "generate_sample_module", "(", "module_dir", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "module_dir", ")", ":", "LOGGER", ".", "error", "(", "\"Error generating sample module -- directory %s \"", "\"already exists!\"", ",", "module_dir", ")", "sys"...
Generate skeleton sample module.
[ "Generate", "skeleton", "sample", "module", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway/gen_sample.py#L18-L25
train
212,394
onicagroup/runway
runway/commands/runway/gen_sample.py
generate_sample_sls_module
def generate_sample_sls_module(env_root, module_dir=None): """Generate skeleton Serverless sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.sls') generate_sample_module(module_dir) for i in ['config-dev-us-east-1.json', 'handler.py', 'package.json', 'serverless.yml']: shutil.copyfile( os.path.join(ROOT, 'templates', 'serverless', i), os.path.join(module_dir, i), ) LOGGER.info("Sample Serverless module created at %s", module_dir)
python
def generate_sample_sls_module(env_root, module_dir=None): """Generate skeleton Serverless sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.sls') generate_sample_module(module_dir) for i in ['config-dev-us-east-1.json', 'handler.py', 'package.json', 'serverless.yml']: shutil.copyfile( os.path.join(ROOT, 'templates', 'serverless', i), os.path.join(module_dir, i), ) LOGGER.info("Sample Serverless module created at %s", module_dir)
[ "def", "generate_sample_sls_module", "(", "env_root", ",", "module_dir", "=", "None", ")", ":", "if", "module_dir", "is", "None", ":", "module_dir", "=", "os", ".", "path", ".", "join", "(", "env_root", ",", "'sampleapp.sls'", ")", "generate_sample_module", "(...
Generate skeleton Serverless sample module.
[ "Generate", "skeleton", "Serverless", "sample", "module", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway/gen_sample.py#L28-L43
train
212,395
onicagroup/runway
runway/commands/runway/gen_sample.py
generate_sample_sls_tsc_module
def generate_sample_sls_tsc_module(env_root, module_dir=None): """Generate skeleton Serverless TypeScript sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.sls') generate_sample_module(module_dir) for i in ['package.json', 'serverless.yml', 'tsconfig.json', 'webpack.config.js']: shutil.copyfile( os.path.join(ROOT, 'templates', 'sls-tsc', i), os.path.join(module_dir, i), ) os.mkdir(os.path.join(module_dir, 'src')) for i in ['handler.spec.ts', 'handler.ts']: shutil.copyfile( os.path.join(ROOT, 'templates', 'sls-tsc', 'src', i), os.path.join(module_dir, 'src', i), ) LOGGER.info("Sample Serverless TypeScript module created at %s", module_dir)
python
def generate_sample_sls_tsc_module(env_root, module_dir=None): """Generate skeleton Serverless TypeScript sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.sls') generate_sample_module(module_dir) for i in ['package.json', 'serverless.yml', 'tsconfig.json', 'webpack.config.js']: shutil.copyfile( os.path.join(ROOT, 'templates', 'sls-tsc', i), os.path.join(module_dir, i), ) os.mkdir(os.path.join(module_dir, 'src')) for i in ['handler.spec.ts', 'handler.ts']: shutil.copyfile( os.path.join(ROOT, 'templates', 'sls-tsc', 'src', i), os.path.join(module_dir, 'src', i), ) LOGGER.info("Sample Serverless TypeScript module created at %s", module_dir)
[ "def", "generate_sample_sls_tsc_module", "(", "env_root", ",", "module_dir", "=", "None", ")", ":", "if", "module_dir", "is", "None", ":", "module_dir", "=", "os", ".", "path", ".", "join", "(", "env_root", ",", "'sampleapp.sls'", ")", "generate_sample_module", ...
Generate skeleton Serverless TypeScript sample module.
[ "Generate", "skeleton", "Serverless", "TypeScript", "sample", "module", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway/gen_sample.py#L46-L71
train
212,396
onicagroup/runway
runway/commands/runway/gen_sample.py
generate_sample_cdk_tsc_module
def generate_sample_cdk_tsc_module(env_root, module_dir=None): """Generate skeleton CDK TS sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.cdk') generate_sample_module(module_dir) for i in ['.npmignore', 'cdk.json', 'package.json', 'runway.module.yml', 'tsconfig.json', 'README.md']: shutil.copyfile( os.path.join(ROOT, 'templates', 'cdk-tsc', i), os.path.join(module_dir, i), ) for i in [['bin', 'sample.ts'], ['lib', 'sample-stack.ts']]: os.mkdir(os.path.join(module_dir, i[0])) shutil.copyfile( os.path.join(ROOT, 'templates', 'cdk-tsc', i[0], i[1]), os.path.join(module_dir, i[0], i[1]), ) with open(os.path.join(module_dir, '.gitignore'), 'w') as stream: stream.write('*.js\n') stream.write('*.d.ts\n') stream.write('node_modules\n') LOGGER.info("Sample CDK module created at %s", module_dir) LOGGER.info('To finish its setup, change to the %s directory and execute ' '"npm install" to generate its lockfile.', module_dir)
python
def generate_sample_cdk_tsc_module(env_root, module_dir=None): """Generate skeleton CDK TS sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.cdk') generate_sample_module(module_dir) for i in ['.npmignore', 'cdk.json', 'package.json', 'runway.module.yml', 'tsconfig.json', 'README.md']: shutil.copyfile( os.path.join(ROOT, 'templates', 'cdk-tsc', i), os.path.join(module_dir, i), ) for i in [['bin', 'sample.ts'], ['lib', 'sample-stack.ts']]: os.mkdir(os.path.join(module_dir, i[0])) shutil.copyfile( os.path.join(ROOT, 'templates', 'cdk-tsc', i[0], i[1]), os.path.join(module_dir, i[0], i[1]), ) with open(os.path.join(module_dir, '.gitignore'), 'w') as stream: stream.write('*.js\n') stream.write('*.d.ts\n') stream.write('node_modules\n') LOGGER.info("Sample CDK module created at %s", module_dir) LOGGER.info('To finish its setup, change to the %s directory and execute ' '"npm install" to generate its lockfile.', module_dir)
[ "def", "generate_sample_cdk_tsc_module", "(", "env_root", ",", "module_dir", "=", "None", ")", ":", "if", "module_dir", "is", "None", ":", "module_dir", "=", "os", ".", "path", ".", "join", "(", "env_root", ",", "'sampleapp.cdk'", ")", "generate_sample_module", ...
Generate skeleton CDK TS sample module.
[ "Generate", "skeleton", "CDK", "TS", "sample", "module", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway/gen_sample.py#L74-L104
train
212,397
onicagroup/runway
runway/commands/runway/gen_sample.py
generate_sample_cdk_py_module
def generate_sample_cdk_py_module(env_root, module_dir=None): """Generate skeleton CDK python sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.cdk') generate_sample_module(module_dir) for i in ['app.py', 'cdk.json', 'lambda-index.py', 'package.json', 'runway.module.yml', 'Pipfile']: shutil.copyfile( os.path.join(ROOT, 'templates', 'cdk-py', i), os.path.join(module_dir, i), ) with open(os.path.join(module_dir, '.gitignore'), 'w') as stream: stream.write('node_modules') LOGGER.info("Sample CDK module created at %s", module_dir) LOGGER.info('To finish its setup, change to the %s directory and execute ' '"npm install" and "pipenv update -d --three" to generate its ' 'lockfiles.', module_dir)
python
def generate_sample_cdk_py_module(env_root, module_dir=None): """Generate skeleton CDK python sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.cdk') generate_sample_module(module_dir) for i in ['app.py', 'cdk.json', 'lambda-index.py', 'package.json', 'runway.module.yml', 'Pipfile']: shutil.copyfile( os.path.join(ROOT, 'templates', 'cdk-py', i), os.path.join(module_dir, i), ) with open(os.path.join(module_dir, '.gitignore'), 'w') as stream: stream.write('node_modules') LOGGER.info("Sample CDK module created at %s", module_dir) LOGGER.info('To finish its setup, change to the %s directory and execute ' '"npm install" and "pipenv update -d --three" to generate its ' 'lockfiles.', module_dir)
[ "def", "generate_sample_cdk_py_module", "(", "env_root", ",", "module_dir", "=", "None", ")", ":", "if", "module_dir", "is", "None", ":", "module_dir", "=", "os", ".", "path", ".", "join", "(", "env_root", ",", "'sampleapp.cdk'", ")", "generate_sample_module", ...
Generate skeleton CDK python sample module.
[ "Generate", "skeleton", "CDK", "python", "sample", "module", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway/gen_sample.py#L154-L173
train
212,398
onicagroup/runway
runway/commands/runway/gen_sample.py
generate_sample_cfn_module
def generate_sample_cfn_module(env_root, module_dir=None): """Generate skeleton CloudFormation sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.cfn') generate_sample_module(module_dir) for i in ['stacks.yaml', 'dev-us-east-1.env']: shutil.copyfile( os.path.join(ROOT, 'templates', 'cfn', i), os.path.join(module_dir, i) ) os.mkdir(os.path.join(module_dir, 'templates')) with open(os.path.join(module_dir, 'templates', 'tf_state.yml'), 'w') as stream: stream.write( cfn_flip.flip( check_output( [sys.executable, os.path.join(ROOT, 'templates', 'stacker', 'tfstate_blueprints', 'tf_state.py')] ) ) ) LOGGER.info("Sample CloudFormation module created at %s", module_dir)
python
def generate_sample_cfn_module(env_root, module_dir=None): """Generate skeleton CloudFormation sample module.""" if module_dir is None: module_dir = os.path.join(env_root, 'sampleapp.cfn') generate_sample_module(module_dir) for i in ['stacks.yaml', 'dev-us-east-1.env']: shutil.copyfile( os.path.join(ROOT, 'templates', 'cfn', i), os.path.join(module_dir, i) ) os.mkdir(os.path.join(module_dir, 'templates')) with open(os.path.join(module_dir, 'templates', 'tf_state.yml'), 'w') as stream: stream.write( cfn_flip.flip( check_output( [sys.executable, os.path.join(ROOT, 'templates', 'stacker', 'tfstate_blueprints', 'tf_state.py')] ) ) ) LOGGER.info("Sample CloudFormation module created at %s", module_dir)
[ "def", "generate_sample_cfn_module", "(", "env_root", ",", "module_dir", "=", "None", ")", ":", "if", "module_dir", "is", "None", ":", "module_dir", "=", "os", ".", "path", ".", "join", "(", "env_root", ",", "'sampleapp.cfn'", ")", "generate_sample_module", "(...
Generate skeleton CloudFormation sample module.
[ "Generate", "skeleton", "CloudFormation", "sample", "module", "." ]
3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f
https://github.com/onicagroup/runway/blob/3f3549ec3bf6e39b9f27d9738a1847f3a4369e7f/runway/commands/runway/gen_sample.py#L176-L207
train
212,399