repo
stringlengths
7
55
path
stringlengths
4
223
func_name
stringlengths
1
134
original_string
stringlengths
75
104k
language
stringclasses
1 value
code
stringlengths
75
104k
code_tokens
listlengths
19
28.4k
docstring
stringlengths
1
46.9k
docstring_tokens
listlengths
1
1.97k
sha
stringlengths
40
40
url
stringlengths
87
315
partition
stringclasses
1 value
aio-libs/aiohttp
aiohttp/multipart.py
MultipartResponseWrapper.next
async def next(self) -> Any: """Emits next multipart reader object.""" item = await self.stream.next() if self.stream.at_eof(): await self.release() return item
python
async def next(self) -> Any:
    """Return the next part from the wrapped multipart stream.

    When the underlying stream reaches EOF after producing this part,
    the wrapper releases the response connection before returning.
    """
    part = await self.stream.next()
    if self.stream.at_eof():
        await self.release()
    return part
[ "async", "def", "next", "(", "self", ")", "->", "Any", ":", "item", "=", "await", "self", ".", "stream", ".", "next", "(", ")", "if", "self", ".", "stream", ".", "at_eof", "(", ")", ":", "await", "self", ".", "release", "(", ")", "return", "item" ]
Emits next multipart reader object.
[ "Emits", "next", "multipart", "reader", "object", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L222-L227
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.read
async def read(self, *, decode: bool=False) -> Any: """Reads body part data. decode: Decodes data following by encoding method from Content-Encoding header. If it missed data remains untouched """ if self._at_eof: return b'' data = bytearray() while not self._at_eof: data.extend((await self.read_chunk(self.chunk_size))) if decode: return self.decode(data) return data
python
async def read(self, *, decode: bool = False) -> Any:
    """Read the whole body part.

    :param decode: when True, pass the collected bytes through
        :meth:`decode` (Content-Encoding / Content-Transfer-Encoding
        handling); otherwise the raw buffer is returned untouched.
    """
    if self._at_eof:
        return b''
    buffer = bytearray()
    while not self._at_eof:
        chunk = await self.read_chunk(self.chunk_size)
        buffer.extend(chunk)
    return self.decode(buffer) if decode else buffer
[ "async", "def", "read", "(", "self", ",", "*", ",", "decode", ":", "bool", "=", "False", ")", "->", "Any", ":", "if", "self", ".", "_at_eof", ":", "return", "b''", "data", "=", "bytearray", "(", ")", "while", "not", "self", ".", "_at_eof", ":", "data", ".", "extend", "(", "(", "await", "self", ".", "read_chunk", "(", "self", ".", "chunk_size", ")", ")", ")", "if", "decode", ":", "return", "self", ".", "decode", "(", "data", ")", "return", "data" ]
Reads body part data. decode: Decodes data following by encoding method from Content-Encoding header. If it missed data remains untouched
[ "Reads", "body", "part", "data", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L277-L291
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.read_chunk
async def read_chunk(self, size: int=chunk_size) -> bytes: """Reads body part content chunk of the specified size. size: chunk size """ if self._at_eof: return b'' if self._length: chunk = await self._read_chunk_from_length(size) else: chunk = await self._read_chunk_from_stream(size) self._read_bytes += len(chunk) if self._read_bytes == self._length: self._at_eof = True if self._at_eof: newline = await self._content.readline() assert newline == self._newline, \ 'reader did not read all the data or it is malformed' return chunk
python
async def read_chunk(self, size: int = chunk_size) -> bytes:
    """Reads body part content chunk of the specified size.

    size: chunk size

    Returns b'' once the part is exhausted.  When a Content-Length is
    known, bytes are taken from the remaining declared length;
    otherwise the stream is scanned for the boundary.
    """
    if self._at_eof:
        return b''
    if self._length:
        # Content-Length present: read up to the advertised byte count.
        chunk = await self._read_chunk_from_length(size)
    else:
        # No declared length: scan the stream for the boundary marker.
        chunk = await self._read_chunk_from_stream(size)
    self._read_bytes += len(chunk)
    if self._read_bytes == self._length:
        self._at_eof = True
    if self._at_eof:
        # Consume the newline terminating the payload so the parent
        # reader is positioned at the boundary line.
        # NOTE(review): `assert` is stripped under `python -O`, so a
        # malformed part would pass silently — consider raising instead.
        newline = await self._content.readline()
        assert newline == self._newline, \
            'reader did not read all the data or it is malformed'
    return chunk
[ "async", "def", "read_chunk", "(", "self", ",", "size", ":", "int", "=", "chunk_size", ")", "->", "bytes", ":", "if", "self", ".", "_at_eof", ":", "return", "b''", "if", "self", ".", "_length", ":", "chunk", "=", "await", "self", ".", "_read_chunk_from_length", "(", "size", ")", "else", ":", "chunk", "=", "await", "self", ".", "_read_chunk_from_stream", "(", "size", ")", "self", ".", "_read_bytes", "+=", "len", "(", "chunk", ")", "if", "self", ".", "_read_bytes", "==", "self", ".", "_length", ":", "self", ".", "_at_eof", "=", "True", "if", "self", ".", "_at_eof", ":", "newline", "=", "await", "self", ".", "_content", ".", "readline", "(", ")", "assert", "newline", "==", "self", ".", "_newline", ",", "'reader did not read all the data or it is malformed'", "return", "chunk" ]
Reads body part content chunk of the specified size. size: chunk size
[ "Reads", "body", "part", "content", "chunk", "of", "the", "specified", "size", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L293-L312
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.readline
async def readline(self) -> bytes: """Reads body part by line by line.""" if self._at_eof: return b'' if self._unread: line = self._unread.popleft() else: line = await self._content.readline() if line.startswith(self._boundary): # the very last boundary may not come with \r\n, # so set single rules for everyone sline = line.rstrip(b'\r\n') boundary = self._boundary last_boundary = self._boundary + b'--' # ensure that we read exactly the boundary, not something alike if sline == boundary or sline == last_boundary: self._at_eof = True self._unread.append(line) return b'' else: next_line = await self._content.readline() if next_line.startswith(self._boundary): # strip newline but only once line = line[:-len(self._newline)] self._unread.append(next_line) return line
python
async def readline(self) -> bytes:
    """Reads body part data line by line.

    Returns b'' at the end of the part.  A boundary line found in the
    stream is pushed back onto the unread buffer so the parent reader
    can consume it.
    """
    if self._at_eof:
        return b''

    if self._unread:
        # A previously pushed-back line takes precedence over the stream.
        line = self._unread.popleft()
    else:
        line = await self._content.readline()

    if line.startswith(self._boundary):
        # the very last boundary may not come with \r\n,
        # so set single rules for everyone
        sline = line.rstrip(b'\r\n')
        boundary = self._boundary
        last_boundary = self._boundary + b'--'
        # ensure that we read exactly the boundary, not something alike
        if sline == boundary or sline == last_boundary:
            self._at_eof = True
            self._unread.append(line)
            return b''
    else:
        # Peek one line ahead: the newline preceding a boundary belongs
        # to the boundary marker, not to the part's payload.
        next_line = await self._content.readline()
        if next_line.startswith(self._boundary):
            # strip newline but only once
            line = line[:-len(self._newline)]
        self._unread.append(next_line)

    return line
[ "async", "def", "readline", "(", "self", ")", "->", "bytes", ":", "if", "self", ".", "_at_eof", ":", "return", "b''", "if", "self", ".", "_unread", ":", "line", "=", "self", ".", "_unread", ".", "popleft", "(", ")", "else", ":", "line", "=", "await", "self", ".", "_content", ".", "readline", "(", ")", "if", "line", ".", "startswith", "(", "self", ".", "_boundary", ")", ":", "# the very last boundary may not come with \\r\\n,", "# so set single rules for everyone", "sline", "=", "line", ".", "rstrip", "(", "b'\\r\\n'", ")", "boundary", "=", "self", ".", "_boundary", "last_boundary", "=", "self", ".", "_boundary", "+", "b'--'", "# ensure that we read exactly the boundary, not something alike", "if", "sline", "==", "boundary", "or", "sline", "==", "last_boundary", ":", "self", ".", "_at_eof", "=", "True", "self", ".", "_unread", ".", "append", "(", "line", ")", "return", "b''", "else", ":", "next_line", "=", "await", "self", ".", "_content", ".", "readline", "(", ")", "if", "next_line", ".", "startswith", "(", "self", ".", "_boundary", ")", ":", "# strip newline but only once", "line", "=", "line", "[", ":", "-", "len", "(", "self", ".", "_newline", ")", "]", "self", ".", "_unread", ".", "append", "(", "next_line", ")", "return", "line" ]
Reads body part by line by line.
[ "Reads", "body", "part", "by", "line", "by", "line", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L362-L390
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.release
async def release(self) -> None: """Like read(), but reads all the data to the void.""" if self._at_eof: return while not self._at_eof: await self.read_chunk(self.chunk_size)
python
async def release(self) -> None:
    """Discard the remaining body part data.

    Drains the part exactly like :meth:`read`, except nothing is
    collected; chunks are read and thrown away until EOF.
    """
    while not self._at_eof:
        await self.read_chunk(self.chunk_size)
[ "async", "def", "release", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_at_eof", ":", "return", "while", "not", "self", ".", "_at_eof", ":", "await", "self", ".", "read_chunk", "(", "self", ".", "chunk_size", ")" ]
Like read(), but reads all the data to the void.
[ "Like", "read", "()", "but", "reads", "all", "the", "data", "to", "the", "void", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L392-L397
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.text
async def text(self, *, encoding: Optional[str]=None) -> str: """Like read(), but assumes that body part contains text data.""" data = await self.read(decode=True) # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA encoding = encoding or self.get_charset(default='utf-8') return data.decode(encoding)
python
async def text(self, *, encoding: Optional[str] = None) -> str:
    """Read the body part and decode it as text.

    The charset comes from *encoding* when given, otherwise from the
    part's Content-Type header (defaulting to UTF-8), per the HTML5
    multipart/form-data encoding algorithm.
    """
    payload = await self.read(decode=True)
    charset = encoding or self.get_charset(default='utf-8')
    return payload.decode(charset)
[ "async", "def", "text", "(", "self", ",", "*", ",", "encoding", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "str", ":", "data", "=", "await", "self", ".", "read", "(", "decode", "=", "True", ")", "# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA", "# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA", "encoding", "=", "encoding", "or", "self", ".", "get_charset", "(", "default", "=", "'utf-8'", ")", "return", "data", ".", "decode", "(", "encoding", ")" ]
Like read(), but assumes that body part contains text data.
[ "Like", "read", "()", "but", "assumes", "that", "body", "part", "contains", "text", "data", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L399-L405
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.json
async def json(self, *, encoding: Optional[str]=None) -> Any: """Like read(), but assumes that body parts contains JSON data.""" data = await self.read(decode=True) if not data: return None encoding = encoding or self.get_charset(default='utf-8') return json.loads(data.decode(encoding))
python
async def json(self, *, encoding: Optional[str] = None) -> Any:
    """Read the body part and parse it as JSON.

    Returns None for an empty body.  The charset comes from *encoding*
    when given, otherwise from the Content-Type header (UTF-8 default).
    """
    raw = await self.read(decode=True)
    if not raw:
        return None
    charset = encoding or self.get_charset(default='utf-8')
    return json.loads(raw.decode(charset))
[ "async", "def", "json", "(", "self", ",", "*", ",", "encoding", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Any", ":", "data", "=", "await", "self", ".", "read", "(", "decode", "=", "True", ")", "if", "not", "data", ":", "return", "None", "encoding", "=", "encoding", "or", "self", ".", "get_charset", "(", "default", "=", "'utf-8'", ")", "return", "json", ".", "loads", "(", "data", ".", "decode", "(", "encoding", ")", ")" ]
Like read(), but assumes that body parts contains JSON data.
[ "Like", "read", "()", "but", "assumes", "that", "body", "parts", "contains", "JSON", "data", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L407-L413
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.form
async def form(self, *, encoding: Optional[str]=None) -> List[Tuple[str, str]]: """Like read(), but assumes that body parts contains form urlencoded data. """ data = await self.read(decode=True) if not data: return [] if encoding is not None: real_encoding = encoding else: real_encoding = self.get_charset(default='utf-8') return parse_qsl(data.rstrip().decode(real_encoding), keep_blank_values=True, encoding=real_encoding)
python
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
    """Read the body part and parse it as form urlencoded data.

    Returns a list of (name, value) pairs; an empty body yields [].
    Blank values are kept, matching browser form submission.
    """
    data = await self.read(decode=True)
    if not data:
        return []
    if encoding is None:
        real_encoding = self.get_charset(default='utf-8')
    else:
        real_encoding = encoding
    decoded = data.rstrip().decode(real_encoding)
    return parse_qsl(decoded,
                     keep_blank_values=True,
                     encoding=real_encoding)
[ "async", "def", "form", "(", "self", ",", "*", ",", "encoding", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "List", "[", "Tuple", "[", "str", ",", "str", "]", "]", ":", "data", "=", "await", "self", ".", "read", "(", "decode", "=", "True", ")", "if", "not", "data", ":", "return", "[", "]", "if", "encoding", "is", "not", "None", ":", "real_encoding", "=", "encoding", "else", ":", "real_encoding", "=", "self", ".", "get_charset", "(", "default", "=", "'utf-8'", ")", "return", "parse_qsl", "(", "data", ".", "rstrip", "(", ")", ".", "decode", "(", "real_encoding", ")", ",", "keep_blank_values", "=", "True", ",", "encoding", "=", "real_encoding", ")" ]
Like read(), but assumes that body parts contains form urlencoded data.
[ "Like", "read", "()", "but", "assumes", "that", "body", "parts", "contains", "form", "urlencoded", "data", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L415-L429
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.decode
def decode(self, data: bytes) -> bytes: """Decodes data according the specified Content-Encoding or Content-Transfer-Encoding headers value. """ if CONTENT_TRANSFER_ENCODING in self.headers: data = self._decode_content_transfer(data) if CONTENT_ENCODING in self.headers: return self._decode_content(data) return data
python
def decode(self, data: bytes) -> bytes:
    """Decode *data* per the part's encoding headers.

    A Content-Transfer-Encoding step (if present) is applied first,
    followed by a Content-Encoding step (if present) — the reverse of
    the order in which senders apply them.
    """
    headers = self.headers
    if CONTENT_TRANSFER_ENCODING in headers:
        data = self._decode_content_transfer(data)
    if CONTENT_ENCODING not in headers:
        return data
    return self._decode_content(data)
[ "def", "decode", "(", "self", ",", "data", ":", "bytes", ")", "->", "bytes", ":", "if", "CONTENT_TRANSFER_ENCODING", "in", "self", ".", "headers", ":", "data", "=", "self", ".", "_decode_content_transfer", "(", "data", ")", "if", "CONTENT_ENCODING", "in", "self", ".", "headers", ":", "return", "self", ".", "_decode_content", "(", "data", ")", "return", "data" ]
Decodes data according the specified Content-Encoding or Content-Transfer-Encoding headers value.
[ "Decodes", "data", "according", "the", "specified", "Content", "-", "Encoding", "or", "Content", "-", "Transfer", "-", "Encoding", "headers", "value", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L435-L443
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.get_charset
def get_charset(self, default: str) -> str: """Returns charset parameter from Content-Type header or default.""" ctype = self.headers.get(CONTENT_TYPE, '') mimetype = parse_mimetype(ctype) return mimetype.parameters.get('charset', default)
python
def get_charset(self, default: str) -> str:
    """Return the charset parameter from the Content-Type header.

    Falls back to *default* when the header or parameter is absent.
    """
    content_type = self.headers.get(CONTENT_TYPE, '')
    parsed = parse_mimetype(content_type)
    return parsed.parameters.get('charset', default)
[ "def", "get_charset", "(", "self", ",", "default", ":", "str", ")", "->", "str", ":", "ctype", "=", "self", ".", "headers", ".", "get", "(", "CONTENT_TYPE", ",", "''", ")", "mimetype", "=", "parse_mimetype", "(", "ctype", ")", "return", "mimetype", ".", "parameters", ".", "get", "(", "'charset'", ",", "default", ")" ]
Returns charset parameter from Content-Type header or default.
[ "Returns", "charset", "parameter", "from", "Content", "-", "Type", "header", "or", "default", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L470-L474
train
aio-libs/aiohttp
aiohttp/multipart.py
BodyPartReader.name
def name(self) -> Optional[str]: """Returns name specified in Content-Disposition header or None if missed or header is malformed. """ _, params = parse_content_disposition( self.headers.get(CONTENT_DISPOSITION)) return content_disposition_filename(params, 'name')
python
def name(self) -> Optional[str]:
    """Return the ``name`` parameter of the Content-Disposition header.

    None when the header is missing or malformed.
    """
    disposition = self.headers.get(CONTENT_DISPOSITION)
    _, params = parse_content_disposition(disposition)
    return content_disposition_filename(params, 'name')
[ "def", "name", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "_", ",", "params", "=", "parse_content_disposition", "(", "self", ".", "headers", ".", "get", "(", "CONTENT_DISPOSITION", ")", ")", "return", "content_disposition_filename", "(", "params", ",", "'name'", ")" ]
Returns name specified in Content-Disposition header or None if missed or header is malformed.
[ "Returns", "name", "specified", "in", "Content", "-", "Disposition", "header", "or", "None", "if", "missed", "or", "header", "is", "malformed", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L477-L484
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader.from_response
def from_response(cls, response: 'ClientResponse') -> Any: """Constructs reader instance from HTTP response. :param response: :class:`~aiohttp.client.ClientResponse` instance """ obj = cls.response_wrapper_cls(response, cls(response.headers, response.content)) return obj
python
def from_response(cls, response: 'ClientResponse') -> Any:
    """Construct a reader wrapped around an HTTP response.

    :param response: :class:`~aiohttp.client.ClientResponse` instance
    """
    reader = cls(response.headers, response.content)
    return cls.response_wrapper_cls(response, reader)
[ "def", "from_response", "(", "cls", ",", "response", ":", "'ClientResponse'", ")", "->", "Any", ":", "obj", "=", "cls", ".", "response_wrapper_cls", "(", "response", ",", "cls", "(", "response", ".", "headers", ",", "response", ".", "content", ")", ")", "return", "obj" ]
Constructs reader instance from HTTP response. :param response: :class:`~aiohttp.client.ClientResponse` instance
[ "Constructs", "reader", "instance", "from", "HTTP", "response", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L557-L564
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader.next
async def next(self) -> Any: """Emits the next multipart body part.""" # So, if we're at BOF, we need to skip till the boundary. if self._at_eof: return await self._maybe_release_last_part() if self._at_bof: await self._read_until_first_boundary() self._at_bof = False else: await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return self._last_part = await self.fetch_next_part() return self._last_part
python
async def next(self) -> Any: """Emits the next multipart body part.""" # So, if we're at BOF, we need to skip till the boundary. if self._at_eof: return await self._maybe_release_last_part() if self._at_bof: await self._read_until_first_boundary() self._at_bof = False else: await self._read_boundary() if self._at_eof: # we just read the last boundary, nothing to do there return self._last_part = await self.fetch_next_part() return self._last_part
[ "async", "def", "next", "(", "self", ")", "->", "Any", ":", "# So, if we're at BOF, we need to skip till the boundary.", "if", "self", ".", "_at_eof", ":", "return", "await", "self", ".", "_maybe_release_last_part", "(", ")", "if", "self", ".", "_at_bof", ":", "await", "self", ".", "_read_until_first_boundary", "(", ")", "self", ".", "_at_bof", "=", "False", "else", ":", "await", "self", ".", "_read_boundary", "(", ")", "if", "self", ".", "_at_eof", ":", "# we just read the last boundary, nothing to do there", "return", "self", ".", "_last_part", "=", "await", "self", ".", "fetch_next_part", "(", ")", "return", "self", ".", "_last_part" ]
Emits the next multipart body part.
[ "Emits", "the", "next", "multipart", "body", "part", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L572-L586
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader.release
async def release(self) -> None: """Reads all the body parts to the void till the final boundary.""" while not self._at_eof: item = await self.next() if item is None: break await item.release()
python
async def release(self) -> None:
    """Drain every remaining body part up to the final boundary."""
    while not self._at_eof:
        part = await self.next()
        if part is None:
            break
        await part.release()
[ "async", "def", "release", "(", "self", ")", "->", "None", ":", "while", "not", "self", ".", "_at_eof", ":", "item", "=", "await", "self", ".", "next", "(", ")", "if", "item", "is", "None", ":", "break", "await", "item", ".", "release", "(", ")" ]
Reads all the body parts to the void till the final boundary.
[ "Reads", "all", "the", "body", "parts", "to", "the", "void", "till", "the", "final", "boundary", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L588-L594
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader._get_part_reader
def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any: """Dispatches the response by the `Content-Type` header, returning suitable reader instance. :param dict headers: Response headers """ ctype = headers.get(CONTENT_TYPE, '') mimetype = parse_mimetype(ctype) if mimetype.type == 'multipart': if self.multipart_reader_cls is None: return type(self)(headers, self._content) return self.multipart_reader_cls( headers, self._content, _newline=self._newline ) else: return self.part_reader_cls( self._boundary, headers, self._content, _newline=self._newline )
python
def _get_part_reader(self, headers: 'CIMultiDictProxy[str]') -> Any:
    """Pick a reader for the next part based on its Content-Type.

    Nested multipart parts get a multipart reader; everything else
    gets a plain body part reader.

    :param dict headers: Response headers
    """
    mimetype = parse_mimetype(headers.get(CONTENT_TYPE, ''))
    if mimetype.type != 'multipart':
        return self.part_reader_cls(
            self._boundary, headers, self._content,
            _newline=self._newline
        )
    if self.multipart_reader_cls is None:
        return type(self)(headers, self._content)
    return self.multipart_reader_cls(
        headers, self._content, _newline=self._newline
    )
[ "def", "_get_part_reader", "(", "self", ",", "headers", ":", "'CIMultiDictProxy[str]'", ")", "->", "Any", ":", "ctype", "=", "headers", ".", "get", "(", "CONTENT_TYPE", ",", "''", ")", "mimetype", "=", "parse_mimetype", "(", "ctype", ")", "if", "mimetype", ".", "type", "==", "'multipart'", ":", "if", "self", ".", "multipart_reader_cls", "is", "None", ":", "return", "type", "(", "self", ")", "(", "headers", ",", "self", ".", "_content", ")", "return", "self", ".", "multipart_reader_cls", "(", "headers", ",", "self", ".", "_content", ",", "_newline", "=", "self", ".", "_newline", ")", "else", ":", "return", "self", ".", "part_reader_cls", "(", "self", ".", "_boundary", ",", "headers", ",", "self", ".", "_content", ",", "_newline", "=", "self", ".", "_newline", ")" ]
Dispatches the response by the `Content-Type` header, returning suitable reader instance. :param dict headers: Response headers
[ "Dispatches", "the", "response", "by", "the", "Content", "-", "Type", "header", "returning", "suitable", "reader", "instance", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L601-L619
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartReader._maybe_release_last_part
async def _maybe_release_last_part(self) -> None: """Ensures that the last read body part is read completely.""" if self._last_part is not None: if not self._last_part.at_eof(): await self._last_part.release() self._unread.extend(self._last_part._unread) self._last_part = None
python
async def _maybe_release_last_part(self) -> None: """Ensures that the last read body part is read completely.""" if self._last_part is not None: if not self._last_part.at_eof(): await self._last_part.release() self._unread.extend(self._last_part._unread) self._last_part = None
[ "async", "def", "_maybe_release_last_part", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_last_part", "is", "not", "None", ":", "if", "not", "self", ".", "_last_part", ".", "at_eof", "(", ")", ":", "await", "self", ".", "_last_part", ".", "release", "(", ")", "self", ".", "_unread", ".", "extend", "(", "self", ".", "_last_part", ".", "_unread", ")", "self", ".", "_last_part", "=", "None" ]
Ensures that the last read body part is read completely.
[ "Ensures", "that", "the", "last", "read", "body", "part", "is", "read", "completely", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L698-L704
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter._boundary_value
def _boundary_value(self) -> str: """Wrap boundary parameter value in quotes, if necessary. Reads self.boundary and returns a unicode sting. """ # Refer to RFCs 7231, 7230, 5234. # # parameter = token "=" ( token / quoted-string ) # token = 1*tchar # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text # obs-text = %x80-FF # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" # / DIGIT / ALPHA # ; any VCHAR, except delimiters # VCHAR = %x21-7E value = self._boundary if re.match(self._valid_tchar_regex, value): return value.decode('ascii') # cannot fail if re.search(self._invalid_qdtext_char_regex, value): raise ValueError("boundary value contains invalid characters") # escape %x5C and %x22 quoted_value_content = value.replace(b'\\', b'\\\\') quoted_value_content = quoted_value_content.replace(b'"', b'\\"') return '"' + quoted_value_content.decode('ascii') + '"'
python
def _boundary_value(self) -> str: """Wrap boundary parameter value in quotes, if necessary. Reads self.boundary and returns a unicode sting. """ # Refer to RFCs 7231, 7230, 5234. # # parameter = token "=" ( token / quoted-string ) # token = 1*tchar # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text # obs-text = %x80-FF # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" # / DIGIT / ALPHA # ; any VCHAR, except delimiters # VCHAR = %x21-7E value = self._boundary if re.match(self._valid_tchar_regex, value): return value.decode('ascii') # cannot fail if re.search(self._invalid_qdtext_char_regex, value): raise ValueError("boundary value contains invalid characters") # escape %x5C and %x22 quoted_value_content = value.replace(b'\\', b'\\\\') quoted_value_content = quoted_value_content.replace(b'"', b'\\"') return '"' + quoted_value_content.decode('ascii') + '"'
[ "def", "_boundary_value", "(", "self", ")", "->", "str", ":", "# Refer to RFCs 7231, 7230, 5234.", "#", "# parameter = token \"=\" ( token / quoted-string )", "# token = 1*tchar", "# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE", "# qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text", "# obs-text = %x80-FF", "# quoted-pair = \"\\\" ( HTAB / SP / VCHAR / obs-text )", "# tchar = \"!\" / \"#\" / \"$\" / \"%\" / \"&\" / \"'\" / \"*\"", "# / \"+\" / \"-\" / \".\" / \"^\" / \"_\" / \"`\" / \"|\" / \"~\"", "# / DIGIT / ALPHA", "# ; any VCHAR, except delimiters", "# VCHAR = %x21-7E", "value", "=", "self", ".", "_boundary", "if", "re", ".", "match", "(", "self", ".", "_valid_tchar_regex", ",", "value", ")", ":", "return", "value", ".", "decode", "(", "'ascii'", ")", "# cannot fail", "if", "re", ".", "search", "(", "self", ".", "_invalid_qdtext_char_regex", ",", "value", ")", ":", "raise", "ValueError", "(", "\"boundary value contains invalid characters\"", ")", "# escape %x5C and %x22", "quoted_value_content", "=", "value", ".", "replace", "(", "b'\\\\'", ",", "b'\\\\\\\\'", ")", "quoted_value_content", "=", "quoted_value_content", ".", "replace", "(", "b'\"'", ",", "b'\\\\\"'", ")", "return", "'\"'", "+", "quoted_value_content", ".", "decode", "(", "'ascii'", ")", "+", "'\"'" ]
Wrap boundary parameter value in quotes, if necessary. Reads self.boundary and returns a unicode sting.
[ "Wrap", "boundary", "parameter", "value", "in", "quotes", "if", "necessary", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L752-L781
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.append_payload
def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" # compression encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) if encoding == 'identity': encoding = None # te encoding te_encoding = payload.headers.get( CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): raise RuntimeError('unknown content transfer encoding: {}' ''.format(te_encoding)) if te_encoding == 'binary': te_encoding = None # size size = payload.size if size is not None and not (encoding or te_encoding): payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore return payload
python
def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" # compression encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) if encoding == 'identity': encoding = None # te encoding te_encoding = payload.headers.get( CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): raise RuntimeError('unknown content transfer encoding: {}' ''.format(te_encoding)) if te_encoding == 'binary': te_encoding = None # size size = payload.size if size is not None and not (encoding or te_encoding): payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore return payload
[ "def", "append_payload", "(", "self", ",", "payload", ":", "Payload", ")", "->", "Payload", ":", "# compression", "encoding", "=", "payload", ".", "headers", ".", "get", "(", "CONTENT_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "# type: Optional[str] # noqa", "if", "encoding", "and", "encoding", "not", "in", "(", "'deflate'", ",", "'gzip'", ",", "'identity'", ")", ":", "raise", "RuntimeError", "(", "'unknown content encoding: {}'", ".", "format", "(", "encoding", ")", ")", "if", "encoding", "==", "'identity'", ":", "encoding", "=", "None", "# te encoding", "te_encoding", "=", "payload", ".", "headers", ".", "get", "(", "CONTENT_TRANSFER_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "# type: Optional[str] # noqa", "if", "te_encoding", "not", "in", "(", "''", ",", "'base64'", ",", "'quoted-printable'", ",", "'binary'", ")", ":", "raise", "RuntimeError", "(", "'unknown content transfer encoding: {}'", "''", ".", "format", "(", "te_encoding", ")", ")", "if", "te_encoding", "==", "'binary'", ":", "te_encoding", "=", "None", "# size", "size", "=", "payload", ".", "size", "if", "size", "is", "not", "None", "and", "not", "(", "encoding", "or", "te_encoding", ")", ":", "payload", ".", "headers", "[", "CONTENT_LENGTH", "]", "=", "str", "(", "size", ")", "self", ".", "_parts", ".", "append", "(", "(", "payload", ",", "encoding", ",", "te_encoding", ")", ")", "# type: ignore", "return", "payload" ]
Adds a new body part to multipart writer.
[ "Adds", "a", "new", "body", "part", "to", "multipart", "writer", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L806-L830
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.append_json
def append_json( self, obj: Any, headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append JSON part.""" if headers is None: headers = CIMultiDict() return self.append_payload(JsonPayload(obj, headers=headers))
python
def append_json( self, obj: Any, headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append JSON part.""" if headers is None: headers = CIMultiDict() return self.append_payload(JsonPayload(obj, headers=headers))
[ "def", "append_json", "(", "self", ",", "obj", ":", "Any", ",", "headers", ":", "Optional", "[", "'MultiMapping[str]'", "]", "=", "None", ")", "->", "Payload", ":", "if", "headers", "is", "None", ":", "headers", "=", "CIMultiDict", "(", ")", "return", "self", ".", "append_payload", "(", "JsonPayload", "(", "obj", ",", "headers", "=", "headers", ")", ")" ]
Helper to append JSON part.
[ "Helper", "to", "append", "JSON", "part", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L832-L841
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.append_form
def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) if headers is None: headers = CIMultiDict() if isinstance(obj, Mapping): obj = list(obj.items()) data = urlencode(obj, doseq=True) return self.append_payload( StringPayload(data, headers=headers, content_type='application/x-www-form-urlencoded'))
python
def append_form( self, obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], headers: Optional['MultiMapping[str]']=None ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) if headers is None: headers = CIMultiDict() if isinstance(obj, Mapping): obj = list(obj.items()) data = urlencode(obj, doseq=True) return self.append_payload( StringPayload(data, headers=headers, content_type='application/x-www-form-urlencoded'))
[ "def", "append_form", "(", "self", ",", "obj", ":", "Union", "[", "Sequence", "[", "Tuple", "[", "str", ",", "str", "]", "]", ",", "Mapping", "[", "str", ",", "str", "]", "]", ",", "headers", ":", "Optional", "[", "'MultiMapping[str]'", "]", "=", "None", ")", "->", "Payload", ":", "assert", "isinstance", "(", "obj", ",", "(", "Sequence", ",", "Mapping", ")", ")", "if", "headers", "is", "None", ":", "headers", "=", "CIMultiDict", "(", ")", "if", "isinstance", "(", "obj", ",", "Mapping", ")", ":", "obj", "=", "list", "(", "obj", ".", "items", "(", ")", ")", "data", "=", "urlencode", "(", "obj", ",", "doseq", "=", "True", ")", "return", "self", ".", "append_payload", "(", "StringPayload", "(", "data", ",", "headers", "=", "headers", ",", "content_type", "=", "'application/x-www-form-urlencoded'", ")", ")" ]
Helper to append form urlencoded part.
[ "Helper", "to", "append", "form", "urlencoded", "part", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L843-L861
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.size
def size(self) -> Optional[int]: """Size of the payload.""" if not self._parts: return 0 total = 0 for part, encoding, te_encoding in self._parts: if encoding or te_encoding or part.size is None: return None total += int( 2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n' part.size + len(part._binary_headers) + 2 # b'\r\n' ) total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total
python
def size(self) -> Optional[int]: """Size of the payload.""" if not self._parts: return 0 total = 0 for part, encoding, te_encoding in self._parts: if encoding or te_encoding or part.size is None: return None total += int( 2 + len(self._boundary) + 2 + # b'--'+self._boundary+b'\r\n' part.size + len(part._binary_headers) + 2 # b'\r\n' ) total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' return total
[ "def", "size", "(", "self", ")", "->", "Optional", "[", "int", "]", ":", "if", "not", "self", ".", "_parts", ":", "return", "0", "total", "=", "0", "for", "part", ",", "encoding", ",", "te_encoding", "in", "self", ".", "_parts", ":", "if", "encoding", "or", "te_encoding", "or", "part", ".", "size", "is", "None", ":", "return", "None", "total", "+=", "int", "(", "2", "+", "len", "(", "self", ".", "_boundary", ")", "+", "2", "+", "# b'--'+self._boundary+b'\\r\\n'", "part", ".", "size", "+", "len", "(", "part", ".", "_binary_headers", ")", "+", "2", "# b'\\r\\n'", ")", "total", "+=", "2", "+", "len", "(", "self", ".", "_boundary", ")", "+", "4", "# b'--'+self._boundary+b'--\\r\\n'", "return", "total" ]
Size of the payload.
[ "Size", "of", "the", "payload", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L864-L881
train
aio-libs/aiohttp
aiohttp/multipart.py
MultipartWriter.write
async def write(self, writer: Any, close_boundary: bool=True) -> None: """Write body.""" if not self._parts: return for part, encoding, te_encoding in self._parts: await writer.write(b'--' + self._boundary + b'\r\n') await writer.write(part._binary_headers) if encoding or te_encoding: w = MultipartPayloadWriter(writer) if encoding: w.enable_compression(encoding) if te_encoding: w.enable_encoding(te_encoding) await part.write(w) # type: ignore await w.write_eof() else: await part.write(writer) await writer.write(b'\r\n') if close_boundary: await writer.write(b'--' + self._boundary + b'--\r\n')
python
async def write(self, writer: Any, close_boundary: bool=True) -> None: """Write body.""" if not self._parts: return for part, encoding, te_encoding in self._parts: await writer.write(b'--' + self._boundary + b'\r\n') await writer.write(part._binary_headers) if encoding or te_encoding: w = MultipartPayloadWriter(writer) if encoding: w.enable_compression(encoding) if te_encoding: w.enable_encoding(te_encoding) await part.write(w) # type: ignore await w.write_eof() else: await part.write(writer) await writer.write(b'\r\n') if close_boundary: await writer.write(b'--' + self._boundary + b'--\r\n')
[ "async", "def", "write", "(", "self", ",", "writer", ":", "Any", ",", "close_boundary", ":", "bool", "=", "True", ")", "->", "None", ":", "if", "not", "self", ".", "_parts", ":", "return", "for", "part", ",", "encoding", ",", "te_encoding", "in", "self", ".", "_parts", ":", "await", "writer", ".", "write", "(", "b'--'", "+", "self", ".", "_boundary", "+", "b'\\r\\n'", ")", "await", "writer", ".", "write", "(", "part", ".", "_binary_headers", ")", "if", "encoding", "or", "te_encoding", ":", "w", "=", "MultipartPayloadWriter", "(", "writer", ")", "if", "encoding", ":", "w", ".", "enable_compression", "(", "encoding", ")", "if", "te_encoding", ":", "w", ".", "enable_encoding", "(", "te_encoding", ")", "await", "part", ".", "write", "(", "w", ")", "# type: ignore", "await", "w", ".", "write_eof", "(", ")", "else", ":", "await", "part", ".", "write", "(", "writer", ")", "await", "writer", ".", "write", "(", "b'\\r\\n'", ")", "if", "close_boundary", ":", "await", "writer", ".", "write", "(", "b'--'", "+", "self", ".", "_boundary", "+", "b'--\\r\\n'", ")" ]
Write body.
[ "Write", "body", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/multipart.py#L883-L907
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_host
def update_host(self, url: URL) -> None: """Update destination host, port and connection type (ssl).""" # get host/port if not url.host: raise InvalidURL(url) # basic auth info username, password = url.user, url.password if username: self.auth = helpers.BasicAuth(username, password or '')
python
def update_host(self, url: URL) -> None: """Update destination host, port and connection type (ssl).""" # get host/port if not url.host: raise InvalidURL(url) # basic auth info username, password = url.user, url.password if username: self.auth = helpers.BasicAuth(username, password or '')
[ "def", "update_host", "(", "self", ",", "url", ":", "URL", ")", "->", "None", ":", "# get host/port", "if", "not", "url", ".", "host", ":", "raise", "InvalidURL", "(", "url", ")", "# basic auth info", "username", ",", "password", "=", "url", ".", "user", ",", "url", ".", "password", "if", "username", ":", "self", ".", "auth", "=", "helpers", ".", "BasicAuth", "(", "username", ",", "password", "or", "''", ")" ]
Update destination host, port and connection type (ssl).
[ "Update", "destination", "host", "port", "and", "connection", "type", "(", "ssl", ")", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L296-L305
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_version
def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. parser HTTP version '1.1' => (1, 1) """ if isinstance(version, str): v = [l.strip() for l in version.split('.', 1)] try: version = http.HttpVersion(int(v[0]), int(v[1])) except ValueError: raise ValueError( 'Can not parse http version number: {}' .format(version)) from None self.version = version
python
def update_version(self, version: Union[http.HttpVersion, str]) -> None: """Convert request version to two elements tuple. parser HTTP version '1.1' => (1, 1) """ if isinstance(version, str): v = [l.strip() for l in version.split('.', 1)] try: version = http.HttpVersion(int(v[0]), int(v[1])) except ValueError: raise ValueError( 'Can not parse http version number: {}' .format(version)) from None self.version = version
[ "def", "update_version", "(", "self", ",", "version", ":", "Union", "[", "http", ".", "HttpVersion", ",", "str", "]", ")", "->", "None", ":", "if", "isinstance", "(", "version", ",", "str", ")", ":", "v", "=", "[", "l", ".", "strip", "(", ")", "for", "l", "in", "version", ".", "split", "(", "'.'", ",", "1", ")", "]", "try", ":", "version", "=", "http", ".", "HttpVersion", "(", "int", "(", "v", "[", "0", "]", ")", ",", "int", "(", "v", "[", "1", "]", ")", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Can not parse http version number: {}'", ".", "format", "(", "version", ")", ")", "from", "None", "self", ".", "version", "=", "version" ]
Convert request version to two elements tuple. parser HTTP version '1.1' => (1, 1)
[ "Convert", "request", "version", "to", "two", "elements", "tuple", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L307-L320
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_headers
def update_headers(self, headers: Optional[LooseHeaders]) -> None: """Update request headers.""" self.headers = CIMultiDict() # type: CIMultiDict[str] # add host netloc = cast(str, self.url.raw_host) if helpers.is_ipv6_address(netloc): netloc = '[{}]'.format(netloc) if not self.url.is_default_port(): netloc += ':' + str(self.url.port) self.headers[hdrs.HOST] = netloc if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): headers = headers.items() # type: ignore for key, value in headers: # A special case for Host header if key.lower() == 'host': self.headers[key] = value else: self.headers.add(key, value)
python
def update_headers(self, headers: Optional[LooseHeaders]) -> None: """Update request headers.""" self.headers = CIMultiDict() # type: CIMultiDict[str] # add host netloc = cast(str, self.url.raw_host) if helpers.is_ipv6_address(netloc): netloc = '[{}]'.format(netloc) if not self.url.is_default_port(): netloc += ':' + str(self.url.port) self.headers[hdrs.HOST] = netloc if headers: if isinstance(headers, (dict, MultiDictProxy, MultiDict)): headers = headers.items() # type: ignore for key, value in headers: # A special case for Host header if key.lower() == 'host': self.headers[key] = value else: self.headers.add(key, value)
[ "def", "update_headers", "(", "self", ",", "headers", ":", "Optional", "[", "LooseHeaders", "]", ")", "->", "None", ":", "self", ".", "headers", "=", "CIMultiDict", "(", ")", "# type: CIMultiDict[str]", "# add host", "netloc", "=", "cast", "(", "str", ",", "self", ".", "url", ".", "raw_host", ")", "if", "helpers", ".", "is_ipv6_address", "(", "netloc", ")", ":", "netloc", "=", "'[{}]'", ".", "format", "(", "netloc", ")", "if", "not", "self", ".", "url", ".", "is_default_port", "(", ")", ":", "netloc", "+=", "':'", "+", "str", "(", "self", ".", "url", ".", "port", ")", "self", ".", "headers", "[", "hdrs", ".", "HOST", "]", "=", "netloc", "if", "headers", ":", "if", "isinstance", "(", "headers", ",", "(", "dict", ",", "MultiDictProxy", ",", "MultiDict", ")", ")", ":", "headers", "=", "headers", ".", "items", "(", ")", "# type: ignore", "for", "key", ",", "value", "in", "headers", ":", "# A special case for Host header", "if", "key", ".", "lower", "(", ")", "==", "'host'", ":", "self", ".", "headers", "[", "key", "]", "=", "value", "else", ":", "self", ".", "headers", ".", "add", "(", "key", ",", "value", ")" ]
Update request headers.
[ "Update", "request", "headers", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L322-L343
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_cookies
def update_cookies(self, cookies: Optional[LooseCookies]) -> None: """Update request cookies header.""" if not cookies: return c = SimpleCookie() if hdrs.COOKIE in self.headers: c.load(self.headers.get(hdrs.COOKIE, '')) del self.headers[hdrs.COOKIE] if isinstance(cookies, Mapping): iter_cookies = cookies.items() else: iter_cookies = cookies # type: ignore for name, value in iter_cookies: if isinstance(value, Morsel): # Preserve coded_value mrsl_val = value.get(value.key, Morsel()) mrsl_val.set(value.key, value.value, value.coded_value) # type: ignore # noqa c[name] = mrsl_val else: c[name] = value # type: ignore self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
python
def update_cookies(self, cookies: Optional[LooseCookies]) -> None: """Update request cookies header.""" if not cookies: return c = SimpleCookie() if hdrs.COOKIE in self.headers: c.load(self.headers.get(hdrs.COOKIE, '')) del self.headers[hdrs.COOKIE] if isinstance(cookies, Mapping): iter_cookies = cookies.items() else: iter_cookies = cookies # type: ignore for name, value in iter_cookies: if isinstance(value, Morsel): # Preserve coded_value mrsl_val = value.get(value.key, Morsel()) mrsl_val.set(value.key, value.value, value.coded_value) # type: ignore # noqa c[name] = mrsl_val else: c[name] = value # type: ignore self.headers[hdrs.COOKIE] = c.output(header='', sep=';').strip()
[ "def", "update_cookies", "(", "self", ",", "cookies", ":", "Optional", "[", "LooseCookies", "]", ")", "->", "None", ":", "if", "not", "cookies", ":", "return", "c", "=", "SimpleCookie", "(", ")", "if", "hdrs", ".", "COOKIE", "in", "self", ".", "headers", ":", "c", ".", "load", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "COOKIE", ",", "''", ")", ")", "del", "self", ".", "headers", "[", "hdrs", ".", "COOKIE", "]", "if", "isinstance", "(", "cookies", ",", "Mapping", ")", ":", "iter_cookies", "=", "cookies", ".", "items", "(", ")", "else", ":", "iter_cookies", "=", "cookies", "# type: ignore", "for", "name", ",", "value", "in", "iter_cookies", ":", "if", "isinstance", "(", "value", ",", "Morsel", ")", ":", "# Preserve coded_value", "mrsl_val", "=", "value", ".", "get", "(", "value", ".", "key", ",", "Morsel", "(", ")", ")", "mrsl_val", ".", "set", "(", "value", ".", "key", ",", "value", ".", "value", ",", "value", ".", "coded_value", ")", "# type: ignore # noqa", "c", "[", "name", "]", "=", "mrsl_val", "else", ":", "c", "[", "name", "]", "=", "value", "# type: ignore", "self", ".", "headers", "[", "hdrs", ".", "COOKIE", "]", "=", "c", ".", "output", "(", "header", "=", "''", ",", "sep", "=", "';'", ")", ".", "strip", "(", ")" ]
Update request cookies header.
[ "Update", "request", "cookies", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L358-L381
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_content_encoding
def update_content_encoding(self, data: Any) -> None: """Set request content encoding.""" if not data: return enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() if enc: if self.compress: raise ValueError( 'compress can not be set ' 'if Content-Encoding header is set') elif self.compress: if not isinstance(self.compress, str): self.compress = 'deflate' self.headers[hdrs.CONTENT_ENCODING] = self.compress self.chunked = True
python
def update_content_encoding(self, data: Any) -> None: """Set request content encoding.""" if not data: return enc = self.headers.get(hdrs.CONTENT_ENCODING, '').lower() if enc: if self.compress: raise ValueError( 'compress can not be set ' 'if Content-Encoding header is set') elif self.compress: if not isinstance(self.compress, str): self.compress = 'deflate' self.headers[hdrs.CONTENT_ENCODING] = self.compress self.chunked = True
[ "def", "update_content_encoding", "(", "self", ",", "data", ":", "Any", ")", "->", "None", ":", "if", "not", "data", ":", "return", "enc", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "if", "enc", ":", "if", "self", ".", "compress", ":", "raise", "ValueError", "(", "'compress can not be set '", "'if Content-Encoding header is set'", ")", "elif", "self", ".", "compress", ":", "if", "not", "isinstance", "(", "self", ".", "compress", ",", "str", ")", ":", "self", ".", "compress", "=", "'deflate'", "self", ".", "headers", "[", "hdrs", ".", "CONTENT_ENCODING", "]", "=", "self", ".", "compress", "self", ".", "chunked", "=", "True" ]
Set request content encoding.
[ "Set", "request", "content", "encoding", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L383-L398
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_transfer_encoding
def update_transfer_encoding(self) -> None: """Analyze transfer-encoding header.""" te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower() if 'chunked' in te: if self.chunked: raise ValueError( 'chunked can not be set ' 'if "Transfer-Encoding: chunked" header is set') elif self.chunked: if hdrs.CONTENT_LENGTH in self.headers: raise ValueError( 'chunked can not be set ' 'if Content-Length header is set') self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' else: if hdrs.CONTENT_LENGTH not in self.headers: self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
python
def update_transfer_encoding(self) -> None: """Analyze transfer-encoding header.""" te = self.headers.get(hdrs.TRANSFER_ENCODING, '').lower() if 'chunked' in te: if self.chunked: raise ValueError( 'chunked can not be set ' 'if "Transfer-Encoding: chunked" header is set') elif self.chunked: if hdrs.CONTENT_LENGTH in self.headers: raise ValueError( 'chunked can not be set ' 'if Content-Length header is set') self.headers[hdrs.TRANSFER_ENCODING] = 'chunked' else: if hdrs.CONTENT_LENGTH not in self.headers: self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
[ "def", "update_transfer_encoding", "(", "self", ")", "->", "None", ":", "te", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "TRANSFER_ENCODING", ",", "''", ")", ".", "lower", "(", ")", "if", "'chunked'", "in", "te", ":", "if", "self", ".", "chunked", ":", "raise", "ValueError", "(", "'chunked can not be set '", "'if \"Transfer-Encoding: chunked\" header is set'", ")", "elif", "self", ".", "chunked", ":", "if", "hdrs", ".", "CONTENT_LENGTH", "in", "self", ".", "headers", ":", "raise", "ValueError", "(", "'chunked can not be set '", "'if Content-Length header is set'", ")", "self", ".", "headers", "[", "hdrs", ".", "TRANSFER_ENCODING", "]", "=", "'chunked'", "else", ":", "if", "hdrs", ".", "CONTENT_LENGTH", "not", "in", "self", ".", "headers", ":", "self", ".", "headers", "[", "hdrs", ".", "CONTENT_LENGTH", "]", "=", "str", "(", "len", "(", "self", ".", "body", ")", ")" ]
Analyze transfer-encoding header.
[ "Analyze", "transfer", "-", "encoding", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L400-L419
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.update_auth
def update_auth(self, auth: Optional[BasicAuth]) -> None: """Set basic auth.""" if auth is None: auth = self.auth if auth is None: return if not isinstance(auth, helpers.BasicAuth): raise TypeError('BasicAuth() tuple is required instead') self.headers[hdrs.AUTHORIZATION] = auth.encode()
python
def update_auth(self, auth: Optional[BasicAuth]) -> None: """Set basic auth.""" if auth is None: auth = self.auth if auth is None: return if not isinstance(auth, helpers.BasicAuth): raise TypeError('BasicAuth() tuple is required instead') self.headers[hdrs.AUTHORIZATION] = auth.encode()
[ "def", "update_auth", "(", "self", ",", "auth", ":", "Optional", "[", "BasicAuth", "]", ")", "->", "None", ":", "if", "auth", "is", "None", ":", "auth", "=", "self", ".", "auth", "if", "auth", "is", "None", ":", "return", "if", "not", "isinstance", "(", "auth", ",", "helpers", ".", "BasicAuth", ")", ":", "raise", "TypeError", "(", "'BasicAuth() tuple is required instead'", ")", "self", ".", "headers", "[", "hdrs", ".", "AUTHORIZATION", "]", "=", "auth", ".", "encode", "(", ")" ]
Set basic auth.
[ "Set", "basic", "auth", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L421-L431
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientRequest.write_bytes
async def write_bytes(self, writer: AbstractStreamWriter, conn: 'Connection') -> None: """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: await writer.drain() await self._continue protocol = conn.protocol assert protocol is not None try: if isinstance(self.body, payload.Payload): await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): self.body = (self.body,) # type: ignore for chunk in self.body: await writer.write(chunk) # type: ignore await writer.write_eof() except OSError as exc: new_exc = ClientOSError( exc.errno, 'Can not write request body for %s' % self.url) new_exc.__context__ = exc new_exc.__cause__ = exc protocol.set_exception(new_exc) except asyncio.CancelledError as exc: if not conn.closed: protocol.set_exception(exc) except Exception as exc: protocol.set_exception(exc) finally: self._writer = None
python
async def write_bytes(self, writer: AbstractStreamWriter, conn: 'Connection') -> None: """Support coroutines that yields bytes objects.""" # 100 response if self._continue is not None: await writer.drain() await self._continue protocol = conn.protocol assert protocol is not None try: if isinstance(self.body, payload.Payload): await self.body.write(writer) else: if isinstance(self.body, (bytes, bytearray)): self.body = (self.body,) # type: ignore for chunk in self.body: await writer.write(chunk) # type: ignore await writer.write_eof() except OSError as exc: new_exc = ClientOSError( exc.errno, 'Can not write request body for %s' % self.url) new_exc.__context__ = exc new_exc.__cause__ = exc protocol.set_exception(new_exc) except asyncio.CancelledError as exc: if not conn.closed: protocol.set_exception(exc) except Exception as exc: protocol.set_exception(exc) finally: self._writer = None
[ "async", "def", "write_bytes", "(", "self", ",", "writer", ":", "AbstractStreamWriter", ",", "conn", ":", "'Connection'", ")", "->", "None", ":", "# 100 response", "if", "self", ".", "_continue", "is", "not", "None", ":", "await", "writer", ".", "drain", "(", ")", "await", "self", ".", "_continue", "protocol", "=", "conn", ".", "protocol", "assert", "protocol", "is", "not", "None", "try", ":", "if", "isinstance", "(", "self", ".", "body", ",", "payload", ".", "Payload", ")", ":", "await", "self", ".", "body", ".", "write", "(", "writer", ")", "else", ":", "if", "isinstance", "(", "self", ".", "body", ",", "(", "bytes", ",", "bytearray", ")", ")", ":", "self", ".", "body", "=", "(", "self", ".", "body", ",", ")", "# type: ignore", "for", "chunk", "in", "self", ".", "body", ":", "await", "writer", ".", "write", "(", "chunk", ")", "# type: ignore", "await", "writer", ".", "write_eof", "(", ")", "except", "OSError", "as", "exc", ":", "new_exc", "=", "ClientOSError", "(", "exc", ".", "errno", ",", "'Can not write request body for %s'", "%", "self", ".", "url", ")", "new_exc", ".", "__context__", "=", "exc", "new_exc", ".", "__cause__", "=", "exc", "protocol", ".", "set_exception", "(", "new_exc", ")", "except", "asyncio", ".", "CancelledError", "as", "exc", ":", "if", "not", "conn", ".", "closed", ":", "protocol", ".", "set_exception", "(", "exc", ")", "except", "Exception", "as", "exc", ":", "protocol", ".", "set_exception", "(", "exc", ")", "finally", ":", "self", ".", "_writer", "=", "None" ]
Support coroutines that yields bytes objects.
[ "Support", "coroutines", "that", "yields", "bytes", "objects", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L501-L535
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.start
async def start(self, connection: 'Connection') -> 'ClientResponse': """Start response processing.""" self._closed = False self._protocol = connection.protocol self._connection = connection with self._timer: while True: # read response try: message, payload = await self._protocol.read() # type: ignore # noqa except http.HttpProcessingError as exc: raise ClientResponseError( self.request_info, self.history, status=exc.code, message=exc.message, headers=exc.headers) from exc if (message.code < 100 or message.code > 199 or message.code == 101): break if self._continue is not None: set_result(self._continue, True) self._continue = None # payload eof handler payload.on_eof(self._response_eof) # response status self.version = message.version self.status = message.code self.reason = message.reason # headers self._headers = message.headers # type is CIMultiDictProxy self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] # payload self.content = payload # cookies for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): try: self.cookies.load(hdr) except CookieError as exc: client_logger.warning( 'Can not load response cookies: %s', exc) return self
python
async def start(self, connection: 'Connection') -> 'ClientResponse': """Start response processing.""" self._closed = False self._protocol = connection.protocol self._connection = connection with self._timer: while True: # read response try: message, payload = await self._protocol.read() # type: ignore # noqa except http.HttpProcessingError as exc: raise ClientResponseError( self.request_info, self.history, status=exc.code, message=exc.message, headers=exc.headers) from exc if (message.code < 100 or message.code > 199 or message.code == 101): break if self._continue is not None: set_result(self._continue, True) self._continue = None # payload eof handler payload.on_eof(self._response_eof) # response status self.version = message.version self.status = message.code self.reason = message.reason # headers self._headers = message.headers # type is CIMultiDictProxy self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] # payload self.content = payload # cookies for hdr in self.headers.getall(hdrs.SET_COOKIE, ()): try: self.cookies.load(hdr) except CookieError as exc: client_logger.warning( 'Can not load response cookies: %s', exc) return self
[ "async", "def", "start", "(", "self", ",", "connection", ":", "'Connection'", ")", "->", "'ClientResponse'", ":", "self", ".", "_closed", "=", "False", "self", ".", "_protocol", "=", "connection", ".", "protocol", "self", ".", "_connection", "=", "connection", "with", "self", ".", "_timer", ":", "while", "True", ":", "# read response", "try", ":", "message", ",", "payload", "=", "await", "self", ".", "_protocol", ".", "read", "(", ")", "# type: ignore # noqa", "except", "http", ".", "HttpProcessingError", "as", "exc", ":", "raise", "ClientResponseError", "(", "self", ".", "request_info", ",", "self", ".", "history", ",", "status", "=", "exc", ".", "code", ",", "message", "=", "exc", ".", "message", ",", "headers", "=", "exc", ".", "headers", ")", "from", "exc", "if", "(", "message", ".", "code", "<", "100", "or", "message", ".", "code", ">", "199", "or", "message", ".", "code", "==", "101", ")", ":", "break", "if", "self", ".", "_continue", "is", "not", "None", ":", "set_result", "(", "self", ".", "_continue", ",", "True", ")", "self", ".", "_continue", "=", "None", "# payload eof handler", "payload", ".", "on_eof", "(", "self", ".", "_response_eof", ")", "# response status", "self", ".", "version", "=", "message", ".", "version", "self", ".", "status", "=", "message", ".", "code", "self", ".", "reason", "=", "message", ".", "reason", "# headers", "self", ".", "_headers", "=", "message", ".", "headers", "# type is CIMultiDictProxy", "self", ".", "_raw_headers", "=", "message", ".", "raw_headers", "# type is Tuple[bytes, bytes]", "# payload", "self", ".", "content", "=", "payload", "# cookies", "for", "hdr", "in", "self", ".", "headers", ".", "getall", "(", "hdrs", ".", "SET_COOKIE", ",", "(", ")", ")", ":", "try", ":", "self", ".", "cookies", ".", "load", "(", "hdr", ")", "except", "CookieError", "as", "exc", ":", "client_logger", ".", "warning", "(", "'Can not load response cookies: %s'", ",", "exc", ")", "return", "self" ]
Start response processing.
[ "Start", "response", "processing", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L788-L835
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.read
async def read(self) -> bytes: """Read response payload.""" if self._body is None: try: self._body = await self.content.read() for trace in self._traces: await trace.send_response_chunk_received(self._body) except BaseException: self.close() raise elif self._released: raise ClientConnectionError('Connection closed') return self._body
python
async def read(self) -> bytes: """Read response payload.""" if self._body is None: try: self._body = await self.content.read() for trace in self._traces: await trace.send_response_chunk_received(self._body) except BaseException: self.close() raise elif self._released: raise ClientConnectionError('Connection closed') return self._body
[ "async", "def", "read", "(", "self", ")", "->", "bytes", ":", "if", "self", ".", "_body", "is", "None", ":", "try", ":", "self", ".", "_body", "=", "await", "self", ".", "content", ".", "read", "(", ")", "for", "trace", "in", "self", ".", "_traces", ":", "await", "trace", ".", "send_response_chunk_received", "(", "self", ".", "_body", ")", "except", "BaseException", ":", "self", ".", "close", "(", ")", "raise", "elif", "self", ".", "_released", ":", "raise", "ClientConnectionError", "(", "'Connection closed'", ")", "return", "self", ".", "_body" ]
Read response payload.
[ "Read", "response", "payload", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L920-L933
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.text
async def text(self, encoding: Optional[str]=None, errors: str='strict') -> str: """Read response payload and decode.""" if self._body is None: await self.read() if encoding is None: encoding = self.get_encoding() return self._body.decode(encoding, errors=errors)
python
async def text(self, encoding: Optional[str]=None, errors: str='strict') -> str: """Read response payload and decode.""" if self._body is None: await self.read() if encoding is None: encoding = self.get_encoding() return self._body.decode(encoding, errors=errors)
[ "async", "def", "text", "(", "self", ",", "encoding", ":", "Optional", "[", "str", "]", "=", "None", ",", "errors", ":", "str", "=", "'strict'", ")", "->", "str", ":", "if", "self", ".", "_body", "is", "None", ":", "await", "self", ".", "read", "(", ")", "if", "encoding", "is", "None", ":", "encoding", "=", "self", ".", "get_encoding", "(", ")", "return", "self", ".", "_body", ".", "decode", "(", "encoding", ",", "errors", "=", "errors", ")" ]
Read response payload and decode.
[ "Read", "response", "payload", "and", "decode", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L956-L965
train
aio-libs/aiohttp
aiohttp/client_reqrep.py
ClientResponse.json
async def json(self, *, encoding: str=None, loads: JSONDecoder=DEFAULT_JSON_DECODER, content_type: Optional[str]='application/json') -> Any: """Read and decodes JSON response.""" if self._body is None: await self.read() if content_type: ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() if not _is_expected_content_type(ctype, content_type): raise ContentTypeError( self.request_info, self.history, message=('Attempt to decode JSON with ' 'unexpected mimetype: %s' % ctype), headers=self.headers) if encoding is None: encoding = self.get_encoding() return loads(self._body.decode(encoding))
python
async def json(self, *, encoding: str=None, loads: JSONDecoder=DEFAULT_JSON_DECODER, content_type: Optional[str]='application/json') -> Any: """Read and decodes JSON response.""" if self._body is None: await self.read() if content_type: ctype = self.headers.get(hdrs.CONTENT_TYPE, '').lower() if not _is_expected_content_type(ctype, content_type): raise ContentTypeError( self.request_info, self.history, message=('Attempt to decode JSON with ' 'unexpected mimetype: %s' % ctype), headers=self.headers) if encoding is None: encoding = self.get_encoding() return loads(self._body.decode(encoding))
[ "async", "def", "json", "(", "self", ",", "*", ",", "encoding", ":", "str", "=", "None", ",", "loads", ":", "JSONDecoder", "=", "DEFAULT_JSON_DECODER", ",", "content_type", ":", "Optional", "[", "str", "]", "=", "'application/json'", ")", "->", "Any", ":", "if", "self", ".", "_body", "is", "None", ":", "await", "self", ".", "read", "(", ")", "if", "content_type", ":", "ctype", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ",", "''", ")", ".", "lower", "(", ")", "if", "not", "_is_expected_content_type", "(", "ctype", ",", "content_type", ")", ":", "raise", "ContentTypeError", "(", "self", ".", "request_info", ",", "self", ".", "history", ",", "message", "=", "(", "'Attempt to decode JSON with '", "'unexpected mimetype: %s'", "%", "ctype", ")", ",", "headers", "=", "self", ".", "headers", ")", "if", "encoding", "is", "None", ":", "encoding", "=", "self", ".", "get_encoding", "(", ")", "return", "loads", "(", "self", ".", "_body", ".", "decode", "(", "encoding", ")", ")" ]
Read and decodes JSON response.
[ "Read", "and", "decodes", "JSON", "response", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_reqrep.py#L967-L987
train
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.enable_chunked_encoding
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None: """Enables automatic chunked transfer encoding.""" self._chunked = True if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError("You can't enable chunked encoding when " "a content length is set") if chunk_size is not None: warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
python
def enable_chunked_encoding(self, chunk_size: Optional[int]=None) -> None: """Enables automatic chunked transfer encoding.""" self._chunked = True if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError("You can't enable chunked encoding when " "a content length is set") if chunk_size is not None: warnings.warn('Chunk size is deprecated #1615', DeprecationWarning)
[ "def", "enable_chunked_encoding", "(", "self", ",", "chunk_size", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "None", ":", "self", ".", "_chunked", "=", "True", "if", "hdrs", ".", "CONTENT_LENGTH", "in", "self", ".", "_headers", ":", "raise", "RuntimeError", "(", "\"You can't enable chunked encoding when \"", "\"a content length is set\"", ")", "if", "chunk_size", "is", "not", "None", ":", "warnings", ".", "warn", "(", "'Chunk size is deprecated #1615'", ",", "DeprecationWarning", ")" ]
Enables automatic chunked transfer encoding.
[ "Enables", "automatic", "chunked", "transfer", "encoding", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L144-L152
train
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.enable_compression
def enable_compression(self, force: Optional[Union[bool, ContentCoding]]=None ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. if type(force) == bool: force = ContentCoding.deflate if force else ContentCoding.identity warnings.warn("Using boolean for force is deprecated #3318", DeprecationWarning) elif force is not None: assert isinstance(force, ContentCoding), ("force should one of " "None, bool or " "ContentEncoding") self._compression = True self._compression_force = force
python
def enable_compression(self, force: Optional[Union[bool, ContentCoding]]=None ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. if type(force) == bool: force = ContentCoding.deflate if force else ContentCoding.identity warnings.warn("Using boolean for force is deprecated #3318", DeprecationWarning) elif force is not None: assert isinstance(force, ContentCoding), ("force should one of " "None, bool or " "ContentEncoding") self._compression = True self._compression_force = force
[ "def", "enable_compression", "(", "self", ",", "force", ":", "Optional", "[", "Union", "[", "bool", ",", "ContentCoding", "]", "]", "=", "None", ")", "->", "None", ":", "# Backwards compatibility for when force was a bool <0.17.", "if", "type", "(", "force", ")", "==", "bool", ":", "force", "=", "ContentCoding", ".", "deflate", "if", "force", "else", "ContentCoding", ".", "identity", "warnings", ".", "warn", "(", "\"Using boolean for force is deprecated #3318\"", ",", "DeprecationWarning", ")", "elif", "force", "is", "not", "None", ":", "assert", "isinstance", "(", "force", ",", "ContentCoding", ")", ",", "(", "\"force should one of \"", "\"None, bool or \"", "\"ContentEncoding\"", ")", "self", ".", "_compression", "=", "True", "self", ".", "_compression_force", "=", "force" ]
Enables response compression encoding.
[ "Enables", "response", "compression", "encoding", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L154-L169
train
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.set_cookie
def set_cookie(self, name: str, value: str, *, expires: Optional[str]=None, domain: Optional[str]=None, max_age: Optional[Union[int, str]]=None, path: str='/', secure: Optional[str]=None, httponly: Optional[str]=None, version: Optional[str]=None) -> None: """Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None. """ old = self._cookies.get(name) if old is not None and old.coded_value == '': # deleted cookie self._cookies.pop(name, None) self._cookies[name] = value c = self._cookies[name] if expires is not None: c['expires'] = expires elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': del c['expires'] if domain is not None: c['domain'] = domain if max_age is not None: c['max-age'] = str(max_age) elif 'max-age' in c: del c['max-age'] c['path'] = path if secure is not None: c['secure'] = secure if httponly is not None: c['httponly'] = httponly if version is not None: c['version'] = version
python
def set_cookie(self, name: str, value: str, *, expires: Optional[str]=None, domain: Optional[str]=None, max_age: Optional[Union[int, str]]=None, path: str='/', secure: Optional[str]=None, httponly: Optional[str]=None, version: Optional[str]=None) -> None: """Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None. """ old = self._cookies.get(name) if old is not None and old.coded_value == '': # deleted cookie self._cookies.pop(name, None) self._cookies[name] = value c = self._cookies[name] if expires is not None: c['expires'] = expires elif c.get('expires') == 'Thu, 01 Jan 1970 00:00:00 GMT': del c['expires'] if domain is not None: c['domain'] = domain if max_age is not None: c['max-age'] = str(max_age) elif 'max-age' in c: del c['max-age'] c['path'] = path if secure is not None: c['secure'] = secure if httponly is not None: c['httponly'] = httponly if version is not None: c['version'] = version
[ "def", "set_cookie", "(", "self", ",", "name", ":", "str", ",", "value", ":", "str", ",", "*", ",", "expires", ":", "Optional", "[", "str", "]", "=", "None", ",", "domain", ":", "Optional", "[", "str", "]", "=", "None", ",", "max_age", ":", "Optional", "[", "Union", "[", "int", ",", "str", "]", "]", "=", "None", ",", "path", ":", "str", "=", "'/'", ",", "secure", ":", "Optional", "[", "str", "]", "=", "None", ",", "httponly", ":", "Optional", "[", "str", "]", "=", "None", ",", "version", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "None", ":", "old", "=", "self", ".", "_cookies", ".", "get", "(", "name", ")", "if", "old", "is", "not", "None", "and", "old", ".", "coded_value", "==", "''", ":", "# deleted cookie", "self", ".", "_cookies", ".", "pop", "(", "name", ",", "None", ")", "self", ".", "_cookies", "[", "name", "]", "=", "value", "c", "=", "self", ".", "_cookies", "[", "name", "]", "if", "expires", "is", "not", "None", ":", "c", "[", "'expires'", "]", "=", "expires", "elif", "c", ".", "get", "(", "'expires'", ")", "==", "'Thu, 01 Jan 1970 00:00:00 GMT'", ":", "del", "c", "[", "'expires'", "]", "if", "domain", "is", "not", "None", ":", "c", "[", "'domain'", "]", "=", "domain", "if", "max_age", "is", "not", "None", ":", "c", "[", "'max-age'", "]", "=", "str", "(", "max_age", ")", "elif", "'max-age'", "in", "c", ":", "del", "c", "[", "'max-age'", "]", "c", "[", "'path'", "]", "=", "path", "if", "secure", "is", "not", "None", ":", "c", "[", "'secure'", "]", "=", "secure", "if", "httponly", "is", "not", "None", ":", "c", "[", "'httponly'", "]", "=", "httponly", "if", "version", "is", "not", "None", ":", "c", "[", "'version'", "]", "=", "version" ]
Set or update response cookie. Sets new cookie or updates existent with new value. Also updates only those params which are not None.
[ "Set", "or", "update", "response", "cookie", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L179-L221
train
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.del_cookie
def del_cookie(self, name: str, *, domain: Optional[str]=None, path: str='/') -> None: """Delete cookie. Creates new empty expired cookie. """ # TODO: do we need domain/path here? self._cookies.pop(name, None) self.set_cookie(name, '', max_age=0, expires="Thu, 01 Jan 1970 00:00:00 GMT", domain=domain, path=path)
python
def del_cookie(self, name: str, *, domain: Optional[str]=None, path: str='/') -> None: """Delete cookie. Creates new empty expired cookie. """ # TODO: do we need domain/path here? self._cookies.pop(name, None) self.set_cookie(name, '', max_age=0, expires="Thu, 01 Jan 1970 00:00:00 GMT", domain=domain, path=path)
[ "def", "del_cookie", "(", "self", ",", "name", ":", "str", ",", "*", ",", "domain", ":", "Optional", "[", "str", "]", "=", "None", ",", "path", ":", "str", "=", "'/'", ")", "->", "None", ":", "# TODO: do we need domain/path here?", "self", ".", "_cookies", ".", "pop", "(", "name", ",", "None", ")", "self", ".", "set_cookie", "(", "name", ",", "''", ",", "max_age", "=", "0", ",", "expires", "=", "\"Thu, 01 Jan 1970 00:00:00 GMT\"", ",", "domain", "=", "domain", ",", "path", "=", "path", ")" ]
Delete cookie. Creates new empty expired cookie.
[ "Delete", "cookie", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L223-L234
train
aio-libs/aiohttp
aiohttp/web_response.py
StreamResponse.last_modified
def last_modified(self) -> Optional[datetime.datetime]: """The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object. """ httpdate = self._headers.get(hdrs.LAST_MODIFIED) if httpdate is not None: timetuple = parsedate(httpdate) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
python
def last_modified(self) -> Optional[datetime.datetime]: """The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object. """ httpdate = self._headers.get(hdrs.LAST_MODIFIED) if httpdate is not None: timetuple = parsedate(httpdate) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
[ "def", "last_modified", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "httpdate", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "LAST_MODIFIED", ")", "if", "httpdate", "is", "not", "None", ":", "timetuple", "=", "parsedate", "(", "httpdate", ")", "if", "timetuple", "is", "not", "None", ":", "return", "datetime", ".", "datetime", "(", "*", "timetuple", "[", ":", "6", "]", ",", "tzinfo", "=", "datetime", ".", "timezone", ".", "utc", ")", "return", "None" ]
The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "Last", "-", "Modified", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_response.py#L282-L293
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
_default_expect_handler
async def _default_expect_handler(request: Request) -> None: """Default handler for Expect header. Just send "100 Continue" to client. raise HTTPExpectationFailed if value of header is not "100-continue" """ expect = request.headers.get(hdrs.EXPECT) if request.version == HttpVersion11: if expect.lower() == "100-continue": await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") else: raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
python
async def _default_expect_handler(request: Request) -> None: """Default handler for Expect header. Just send "100 Continue" to client. raise HTTPExpectationFailed if value of header is not "100-continue" """ expect = request.headers.get(hdrs.EXPECT) if request.version == HttpVersion11: if expect.lower() == "100-continue": await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n") else: raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
[ "async", "def", "_default_expect_handler", "(", "request", ":", "Request", ")", "->", "None", ":", "expect", "=", "request", ".", "headers", ".", "get", "(", "hdrs", ".", "EXPECT", ")", "if", "request", ".", "version", "==", "HttpVersion11", ":", "if", "expect", ".", "lower", "(", ")", "==", "\"100-continue\"", ":", "await", "request", ".", "writer", ".", "write", "(", "b\"HTTP/1.1 100 Continue\\r\\n\\r\\n\"", ")", "else", ":", "raise", "HTTPExpectationFailed", "(", "text", "=", "\"Unknown Expect: %s\"", "%", "expect", ")" ]
Default handler for Expect header. Just send "100 Continue" to client. raise HTTPExpectationFailed if value of header is not "100-continue"
[ "Default", "handler", "for", "Expect", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L278-L289
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
ResourceRoute.url_for
def url_for(self, *args: str, **kwargs: str) -> URL: """Construct url for route with additional params.""" return self._resource.url_for(*args, **kwargs)
python
def url_for(self, *args: str, **kwargs: str) -> URL: """Construct url for route with additional params.""" return self._resource.url_for(*args, **kwargs)
[ "def", "url_for", "(", "self", ",", "*", "args", ":", "str", ",", "*", "*", "kwargs", ":", "str", ")", "->", "URL", ":", "return", "self", ".", "_resource", ".", "url_for", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Construct url for route with additional params.
[ "Construct", "url", "for", "route", "with", "additional", "params", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L843-L845
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_static
def add_static(self, prefix: str, path: PathLike, *, name: Optional[str]=None, expect_handler: Optional[_ExpectHandler]=None, chunk_size: int=256 * 1024, show_index: bool=False, follow_symlinks: bool=False, append_version: bool=False) -> AbstractResource: """Add static files view. prefix - url prefix path - folder with files """ assert prefix.startswith('/') if prefix.endswith('/'): prefix = prefix[:-1] resource = StaticResource(prefix, path, name=name, expect_handler=expect_handler, chunk_size=chunk_size, show_index=show_index, follow_symlinks=follow_symlinks, append_version=append_version) self.register_resource(resource) return resource
python
def add_static(self, prefix: str, path: PathLike, *, name: Optional[str]=None, expect_handler: Optional[_ExpectHandler]=None, chunk_size: int=256 * 1024, show_index: bool=False, follow_symlinks: bool=False, append_version: bool=False) -> AbstractResource: """Add static files view. prefix - url prefix path - folder with files """ assert prefix.startswith('/') if prefix.endswith('/'): prefix = prefix[:-1] resource = StaticResource(prefix, path, name=name, expect_handler=expect_handler, chunk_size=chunk_size, show_index=show_index, follow_symlinks=follow_symlinks, append_version=append_version) self.register_resource(resource) return resource
[ "def", "add_static", "(", "self", ",", "prefix", ":", "str", ",", "path", ":", "PathLike", ",", "*", ",", "name", ":", "Optional", "[", "str", "]", "=", "None", ",", "expect_handler", ":", "Optional", "[", "_ExpectHandler", "]", "=", "None", ",", "chunk_size", ":", "int", "=", "256", "*", "1024", ",", "show_index", ":", "bool", "=", "False", ",", "follow_symlinks", ":", "bool", "=", "False", ",", "append_version", ":", "bool", "=", "False", ")", "->", "AbstractResource", ":", "assert", "prefix", ".", "startswith", "(", "'/'", ")", "if", "prefix", ".", "endswith", "(", "'/'", ")", ":", "prefix", "=", "prefix", "[", ":", "-", "1", "]", "resource", "=", "StaticResource", "(", "prefix", ",", "path", ",", "name", "=", "name", ",", "expect_handler", "=", "expect_handler", ",", "chunk_size", "=", "chunk_size", ",", "show_index", "=", "show_index", ",", "follow_symlinks", "=", "follow_symlinks", ",", "append_version", "=", "append_version", ")", "self", ".", "register_resource", "(", "resource", ")", "return", "resource" ]
Add static files view. prefix - url prefix path - folder with files
[ "Add", "static", "files", "view", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1036-L1059
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_head
def add_head(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method HEAD """ return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
python
def add_head(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method HEAD """ return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
[ "def", "add_head", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_HEAD", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method HEAD
[ "Shortcut", "for", "add_route", "with", "method", "HEAD" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1061-L1066
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_options
def add_options(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method OPTIONS """ return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
python
def add_options(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method OPTIONS """ return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
[ "def", "add_options", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_OPTIONS", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method OPTIONS
[ "Shortcut", "for", "add_route", "with", "method", "OPTIONS" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1068-L1073
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_get
def add_get(self, path: str, handler: _WebHandler, *, name: Optional[str]=None, allow_head: bool=True, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint """ resource = self.add_resource(path, name=name) if allow_head: resource.add_route(hdrs.METH_HEAD, handler, **kwargs) return resource.add_route(hdrs.METH_GET, handler, **kwargs)
python
def add_get(self, path: str, handler: _WebHandler, *, name: Optional[str]=None, allow_head: bool=True, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint """ resource = self.add_resource(path, name=name) if allow_head: resource.add_route(hdrs.METH_HEAD, handler, **kwargs) return resource.add_route(hdrs.METH_GET, handler, **kwargs)
[ "def", "add_get", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", ",", "name", ":", "Optional", "[", "str", "]", "=", "None", ",", "allow_head", ":", "bool", "=", "True", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "resource", "=", "self", ".", "add_resource", "(", "path", ",", "name", "=", "name", ")", "if", "allow_head", ":", "resource", ".", "add_route", "(", "hdrs", ".", "METH_HEAD", ",", "handler", ",", "*", "*", "kwargs", ")", "return", "resource", ".", "add_route", "(", "hdrs", ".", "METH_GET", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method GET, if allow_head is true another route is added allowing head requests to the same endpoint
[ "Shortcut", "for", "add_route", "with", "method", "GET", "if", "allow_head", "is", "true", "another", "route", "is", "added", "allowing", "head", "requests", "to", "the", "same", "endpoint" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1075-L1085
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_post
def add_post(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method POST """ return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
python
def add_post(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method POST """ return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
[ "def", "add_post", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_POST", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method POST
[ "Shortcut", "for", "add_route", "with", "method", "POST" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1087-L1092
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_put
def add_put(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method PUT """ return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
python
def add_put(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method PUT """ return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
[ "def", "add_put", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_PUT", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method PUT
[ "Shortcut", "for", "add_route", "with", "method", "PUT" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1094-L1099
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_patch
def add_patch(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method PATCH """ return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
python
def add_patch(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method PATCH """ return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
[ "def", "add_patch", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_PATCH", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method PATCH
[ "Shortcut", "for", "add_route", "with", "method", "PATCH" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1101-L1106
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_delete
def add_delete(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method DELETE """ return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
python
def add_delete(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with method DELETE """ return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
[ "def", "add_delete", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "_WebHandler", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_DELETE", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with method DELETE
[ "Shortcut", "for", "add_route", "with", "method", "DELETE" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1108-L1113
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_view
def add_view(self, path: str, handler: AbstractView, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with ANY methods for a class-based view """ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
python
def add_view(self, path: str, handler: AbstractView, **kwargs: Any) -> AbstractRoute: """ Shortcut for add_route with ANY methods for a class-based view """ return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
[ "def", "add_view", "(", "self", ",", "path", ":", "str", ",", "handler", ":", "AbstractView", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "AbstractRoute", ":", "return", "self", ".", "add_route", "(", "hdrs", ".", "METH_ANY", ",", "path", ",", "handler", ",", "*", "*", "kwargs", ")" ]
Shortcut for add_route with ANY methods for a class-based view
[ "Shortcut", "for", "add_route", "with", "ANY", "methods", "for", "a", "class", "-", "based", "view" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1115-L1120
train
aio-libs/aiohttp
aiohttp/web_urldispatcher.py
UrlDispatcher.add_routes
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None: """Append routes to route table. Parameter should be a sequence of RouteDef objects. """ for route_def in routes: route_def.register(self)
python
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> None: """Append routes to route table. Parameter should be a sequence of RouteDef objects. """ for route_def in routes: route_def.register(self)
[ "def", "add_routes", "(", "self", ",", "routes", ":", "Iterable", "[", "AbstractRouteDef", "]", ")", "->", "None", ":", "for", "route_def", "in", "routes", ":", "route_def", ".", "register", "(", "self", ")" ]
Append routes to route table. Parameter should be a sequence of RouteDef objects.
[ "Append", "routes", "to", "route", "table", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_urldispatcher.py#L1127-L1133
train
aio-libs/aiohttp
aiohttp/http_parser.py
HttpParser.parse_headers
def parse_headers( self, lines: List[bytes] ) -> Tuple['CIMultiDictProxy[str]', RawHeaders, Optional[bool], Optional[str], bool, bool]: """Parses RFC 5322 headers from a stream. Line continuations are supported. Returns list of header name and value pairs. Header name is in upper case. """ headers, raw_headers = self._headers_parser.parse_headers(lines) close_conn = None encoding = None upgrade = False chunked = False # keep-alive conn = headers.get(hdrs.CONNECTION) if conn: v = conn.lower() if v == 'close': close_conn = True elif v == 'keep-alive': close_conn = False elif v == 'upgrade': upgrade = True # encoding enc = headers.get(hdrs.CONTENT_ENCODING) if enc: enc = enc.lower() if enc in ('gzip', 'deflate', 'br'): encoding = enc # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te and 'chunked' in te.lower(): chunked = True return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
python
def parse_headers( self, lines: List[bytes] ) -> Tuple['CIMultiDictProxy[str]', RawHeaders, Optional[bool], Optional[str], bool, bool]: """Parses RFC 5322 headers from a stream. Line continuations are supported. Returns list of header name and value pairs. Header name is in upper case. """ headers, raw_headers = self._headers_parser.parse_headers(lines) close_conn = None encoding = None upgrade = False chunked = False # keep-alive conn = headers.get(hdrs.CONNECTION) if conn: v = conn.lower() if v == 'close': close_conn = True elif v == 'keep-alive': close_conn = False elif v == 'upgrade': upgrade = True # encoding enc = headers.get(hdrs.CONTENT_ENCODING) if enc: enc = enc.lower() if enc in ('gzip', 'deflate', 'br'): encoding = enc # chunking te = headers.get(hdrs.TRANSFER_ENCODING) if te and 'chunked' in te.lower(): chunked = True return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
[ "def", "parse_headers", "(", "self", ",", "lines", ":", "List", "[", "bytes", "]", ")", "->", "Tuple", "[", "'CIMultiDictProxy[str]'", ",", "RawHeaders", ",", "Optional", "[", "bool", "]", ",", "Optional", "[", "str", "]", ",", "bool", ",", "bool", "]", ":", "headers", ",", "raw_headers", "=", "self", ".", "_headers_parser", ".", "parse_headers", "(", "lines", ")", "close_conn", "=", "None", "encoding", "=", "None", "upgrade", "=", "False", "chunked", "=", "False", "# keep-alive", "conn", "=", "headers", ".", "get", "(", "hdrs", ".", "CONNECTION", ")", "if", "conn", ":", "v", "=", "conn", ".", "lower", "(", ")", "if", "v", "==", "'close'", ":", "close_conn", "=", "True", "elif", "v", "==", "'keep-alive'", ":", "close_conn", "=", "False", "elif", "v", "==", "'upgrade'", ":", "upgrade", "=", "True", "# encoding", "enc", "=", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_ENCODING", ")", "if", "enc", ":", "enc", "=", "enc", ".", "lower", "(", ")", "if", "enc", "in", "(", "'gzip'", ",", "'deflate'", ",", "'br'", ")", ":", "encoding", "=", "enc", "# chunking", "te", "=", "headers", ".", "get", "(", "hdrs", ".", "TRANSFER_ENCODING", ")", "if", "te", "and", "'chunked'", "in", "te", ".", "lower", "(", ")", ":", "chunked", "=", "True", "return", "(", "headers", ",", "raw_headers", ",", "close_conn", ",", "encoding", ",", "upgrade", ",", "chunked", ")" ]
Parses RFC 5322 headers from a stream. Line continuations are supported. Returns list of header name and value pairs. Header name is in upper case.
[ "Parses", "RFC", "5322", "headers", "from", "a", "stream", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_parser.py#L369-L412
train
aio-libs/aiohttp
aiohttp/client_ws.py
ClientWebSocketResponse.get_extra_info
def get_extra_info(self, name: str, default: Any=None) -> Any: """extra info from connection transport""" conn = self._response.connection if conn is None: return default transport = conn.transport if transport is None: return default return transport.get_extra_info(name, default)
python
def get_extra_info(self, name: str, default: Any=None) -> Any: """extra info from connection transport""" conn = self._response.connection if conn is None: return default transport = conn.transport if transport is None: return default return transport.get_extra_info(name, default)
[ "def", "get_extra_info", "(", "self", ",", "name", ":", "str", ",", "default", ":", "Any", "=", "None", ")", "->", "Any", ":", "conn", "=", "self", ".", "_response", ".", "connection", "if", "conn", "is", "None", ":", "return", "default", "transport", "=", "conn", ".", "transport", "if", "transport", "is", "None", ":", "return", "default", "return", "transport", ".", "get_extra_info", "(", "name", ",", "default", ")" ]
extra info from connection transport
[ "extra", "info", "from", "connection", "transport" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client_ws.py#L125-L133
train
kennethreitz/requests-html
requests_html.py
user_agent
def user_agent(style=None) -> _UserAgent: """Returns an apparently legit user-agent, if not requested one of a specific style. Defaults to a Chrome-style User-Agent. """ global useragent if (not useragent) and style: useragent = UserAgent() return useragent[style] if style else DEFAULT_USER_AGENT
python
def user_agent(style=None) -> _UserAgent: """Returns an apparently legit user-agent, if not requested one of a specific style. Defaults to a Chrome-style User-Agent. """ global useragent if (not useragent) and style: useragent = UserAgent() return useragent[style] if style else DEFAULT_USER_AGENT
[ "def", "user_agent", "(", "style", "=", "None", ")", "->", "_UserAgent", ":", "global", "useragent", "if", "(", "not", "useragent", ")", "and", "style", ":", "useragent", "=", "UserAgent", "(", ")", "return", "useragent", "[", "style", "]", "if", "style", "else", "DEFAULT_USER_AGENT" ]
Returns an apparently legit user-agent, if not requested one of a specific style. Defaults to a Chrome-style User-Agent.
[ "Returns", "an", "apparently", "legit", "user", "-", "agent", "if", "not", "requested", "one", "of", "a", "specific", "style", ".", "Defaults", "to", "a", "Chrome", "-", "style", "User", "-", "Agent", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L665-L673
train
kennethreitz/requests-html
requests_html.py
BaseParser.raw_html
def raw_html(self) -> _RawHTML: """Bytes representation of the HTML content. (`learn more <http://www.diveintopython3.net/strings.html>`_). """ if self._html: return self._html else: return etree.tostring(self.element, encoding='unicode').strip().encode(self.encoding)
python
def raw_html(self) -> _RawHTML: """Bytes representation of the HTML content. (`learn more <http://www.diveintopython3.net/strings.html>`_). """ if self._html: return self._html else: return etree.tostring(self.element, encoding='unicode').strip().encode(self.encoding)
[ "def", "raw_html", "(", "self", ")", "->", "_RawHTML", ":", "if", "self", ".", "_html", ":", "return", "self", ".", "_html", "else", ":", "return", "etree", ".", "tostring", "(", "self", ".", "element", ",", "encoding", "=", "'unicode'", ")", ".", "strip", "(", ")", ".", "encode", "(", "self", ".", "encoding", ")" ]
Bytes representation of the HTML content. (`learn more <http://www.diveintopython3.net/strings.html>`_).
[ "Bytes", "representation", "of", "the", "HTML", "content", ".", "(", "learn", "more", "<http", ":", "//", "www", ".", "diveintopython3", ".", "net", "/", "strings", ".", "html", ">", "_", ")", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L90-L97
train
kennethreitz/requests-html
requests_html.py
BaseParser.html
def html(self) -> _BaseHTML: """Unicode representation of the HTML content (`learn more <http://www.diveintopython3.net/strings.html>`_). """ if self._html: return self.raw_html.decode(self.encoding, errors='replace') else: return etree.tostring(self.element, encoding='unicode').strip()
python
def html(self) -> _BaseHTML: """Unicode representation of the HTML content (`learn more <http://www.diveintopython3.net/strings.html>`_). """ if self._html: return self.raw_html.decode(self.encoding, errors='replace') else: return etree.tostring(self.element, encoding='unicode').strip()
[ "def", "html", "(", "self", ")", "->", "_BaseHTML", ":", "if", "self", ".", "_html", ":", "return", "self", ".", "raw_html", ".", "decode", "(", "self", ".", "encoding", ",", "errors", "=", "'replace'", ")", "else", ":", "return", "etree", ".", "tostring", "(", "self", ".", "element", ",", "encoding", "=", "'unicode'", ")", ".", "strip", "(", ")" ]
Unicode representation of the HTML content (`learn more <http://www.diveintopython3.net/strings.html>`_).
[ "Unicode", "representation", "of", "the", "HTML", "content", "(", "learn", "more", "<http", ":", "//", "www", ".", "diveintopython3", ".", "net", "/", "strings", ".", "html", ">", "_", ")", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L100-L107
train
kennethreitz/requests-html
requests_html.py
BaseParser.encoding
def encoding(self) -> _Encoding: """The encoding string to be used, extracted from the HTML and :class:`HTMLResponse <HTMLResponse>` headers. """ if self._encoding: return self._encoding # Scan meta tags for charset. if self._html: self._encoding = html_to_unicode(self.default_encoding, self._html)[0] # Fall back to requests' detected encoding if decode fails. try: self.raw_html.decode(self.encoding, errors='replace') except UnicodeDecodeError: self._encoding = self.default_encoding return self._encoding if self._encoding else self.default_encoding
python
def encoding(self) -> _Encoding: """The encoding string to be used, extracted from the HTML and :class:`HTMLResponse <HTMLResponse>` headers. """ if self._encoding: return self._encoding # Scan meta tags for charset. if self._html: self._encoding = html_to_unicode(self.default_encoding, self._html)[0] # Fall back to requests' detected encoding if decode fails. try: self.raw_html.decode(self.encoding, errors='replace') except UnicodeDecodeError: self._encoding = self.default_encoding return self._encoding if self._encoding else self.default_encoding
[ "def", "encoding", "(", "self", ")", "->", "_Encoding", ":", "if", "self", ".", "_encoding", ":", "return", "self", ".", "_encoding", "# Scan meta tags for charset.", "if", "self", ".", "_html", ":", "self", ".", "_encoding", "=", "html_to_unicode", "(", "self", ".", "default_encoding", ",", "self", ".", "_html", ")", "[", "0", "]", "# Fall back to requests' detected encoding if decode fails.", "try", ":", "self", ".", "raw_html", ".", "decode", "(", "self", ".", "encoding", ",", "errors", "=", "'replace'", ")", "except", "UnicodeDecodeError", ":", "self", ".", "_encoding", "=", "self", ".", "default_encoding", "return", "self", ".", "_encoding", "if", "self", ".", "_encoding", "else", "self", ".", "default_encoding" ]
The encoding string to be used, extracted from the HTML and :class:`HTMLResponse <HTMLResponse>` headers.
[ "The", "encoding", "string", "to", "be", "used", "extracted", "from", "the", "HTML", "and", ":", "class", ":", "HTMLResponse", "<HTMLResponse", ">", "headers", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L119-L136
train
kennethreitz/requests-html
requests_html.py
BaseParser.pq
def pq(self) -> PyQuery: """`PyQuery <https://pythonhosted.org/pyquery/>`_ representation of the :class:`Element <Element>` or :class:`HTML <HTML>`. """ if self._pq is None: self._pq = PyQuery(self.lxml) return self._pq
python
def pq(self) -> PyQuery: """`PyQuery <https://pythonhosted.org/pyquery/>`_ representation of the :class:`Element <Element>` or :class:`HTML <HTML>`. """ if self._pq is None: self._pq = PyQuery(self.lxml) return self._pq
[ "def", "pq", "(", "self", ")", "->", "PyQuery", ":", "if", "self", ".", "_pq", "is", "None", ":", "self", ".", "_pq", "=", "PyQuery", "(", "self", ".", "lxml", ")", "return", "self", ".", "_pq" ]
`PyQuery <https://pythonhosted.org/pyquery/>`_ representation of the :class:`Element <Element>` or :class:`HTML <HTML>`.
[ "PyQuery", "<https", ":", "//", "pythonhosted", ".", "org", "/", "pyquery", "/", ">", "_", "representation", "of", "the", ":", "class", ":", "Element", "<Element", ">", "or", ":", "class", ":", "HTML", "<HTML", ">", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L144-L151
train
kennethreitz/requests-html
requests_html.py
BaseParser.lxml
def lxml(self) -> HtmlElement: """`lxml <http://lxml.de>`_ representation of the :class:`Element <Element>` or :class:`HTML <HTML>`. """ if self._lxml is None: try: self._lxml = soup_parse(self.html, features='html.parser') except ValueError: self._lxml = lxml.html.fromstring(self.raw_html) return self._lxml
python
def lxml(self) -> HtmlElement: """`lxml <http://lxml.de>`_ representation of the :class:`Element <Element>` or :class:`HTML <HTML>`. """ if self._lxml is None: try: self._lxml = soup_parse(self.html, features='html.parser') except ValueError: self._lxml = lxml.html.fromstring(self.raw_html) return self._lxml
[ "def", "lxml", "(", "self", ")", "->", "HtmlElement", ":", "if", "self", ".", "_lxml", "is", "None", ":", "try", ":", "self", ".", "_lxml", "=", "soup_parse", "(", "self", ".", "html", ",", "features", "=", "'html.parser'", ")", "except", "ValueError", ":", "self", ".", "_lxml", "=", "lxml", ".", "html", ".", "fromstring", "(", "self", ".", "raw_html", ")", "return", "self", ".", "_lxml" ]
`lxml <http://lxml.de>`_ representation of the :class:`Element <Element>` or :class:`HTML <HTML>`.
[ "lxml", "<http", ":", "//", "lxml", ".", "de", ">", "_", "representation", "of", "the", ":", "class", ":", "Element", "<Element", ">", "or", ":", "class", ":", "HTML", "<HTML", ">", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L154-L164
train
kennethreitz/requests-html
requests_html.py
BaseParser.find
def find(self, selector: str = "*", *, containing: _Containing = None, clean: bool = False, first: bool = False, _encoding: str = None) -> _Find: """Given a CSS Selector, returns a list of :class:`Element <Element>` objects or a single one. :param selector: CSS Selector to use. :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags. :param containing: If specified, only return elements that contain the provided text. :param first: Whether or not to return just the first result. :param _encoding: The encoding format. Example CSS Selectors: - ``a`` - ``a.someClass`` - ``a#someID`` - ``a[target=_blank]`` See W3School's `CSS Selectors Reference <https://www.w3schools.com/cssref/css_selectors.asp>`_ for more details. If ``first`` is ``True``, only returns the first :class:`Element <Element>` found. """ # Convert a single containing into a list. if isinstance(containing, str): containing = [containing] encoding = _encoding or self.encoding elements = [ Element(element=found, url=self.url, default_encoding=encoding) for found in self.pq(selector) ] if containing: elements_copy = elements.copy() elements = [] for element in elements_copy: if any([c.lower() in element.full_text.lower() for c in containing]): elements.append(element) elements.reverse() # Sanitize the found HTML. if clean: elements_copy = elements.copy() elements = [] for element in elements_copy: element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml)) elements.append(element) return _get_first_or_list(elements, first)
python
def find(self, selector: str = "*", *, containing: _Containing = None, clean: bool = False, first: bool = False, _encoding: str = None) -> _Find: """Given a CSS Selector, returns a list of :class:`Element <Element>` objects or a single one. :param selector: CSS Selector to use. :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags. :param containing: If specified, only return elements that contain the provided text. :param first: Whether or not to return just the first result. :param _encoding: The encoding format. Example CSS Selectors: - ``a`` - ``a.someClass`` - ``a#someID`` - ``a[target=_blank]`` See W3School's `CSS Selectors Reference <https://www.w3schools.com/cssref/css_selectors.asp>`_ for more details. If ``first`` is ``True``, only returns the first :class:`Element <Element>` found. """ # Convert a single containing into a list. if isinstance(containing, str): containing = [containing] encoding = _encoding or self.encoding elements = [ Element(element=found, url=self.url, default_encoding=encoding) for found in self.pq(selector) ] if containing: elements_copy = elements.copy() elements = [] for element in elements_copy: if any([c.lower() in element.full_text.lower() for c in containing]): elements.append(element) elements.reverse() # Sanitize the found HTML. if clean: elements_copy = elements.copy() elements = [] for element in elements_copy: element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml)) elements.append(element) return _get_first_or_list(elements, first)
[ "def", "find", "(", "self", ",", "selector", ":", "str", "=", "\"*\"", ",", "*", ",", "containing", ":", "_Containing", "=", "None", ",", "clean", ":", "bool", "=", "False", ",", "first", ":", "bool", "=", "False", ",", "_encoding", ":", "str", "=", "None", ")", "->", "_Find", ":", "# Convert a single containing into a list.", "if", "isinstance", "(", "containing", ",", "str", ")", ":", "containing", "=", "[", "containing", "]", "encoding", "=", "_encoding", "or", "self", ".", "encoding", "elements", "=", "[", "Element", "(", "element", "=", "found", ",", "url", "=", "self", ".", "url", ",", "default_encoding", "=", "encoding", ")", "for", "found", "in", "self", ".", "pq", "(", "selector", ")", "]", "if", "containing", ":", "elements_copy", "=", "elements", ".", "copy", "(", ")", "elements", "=", "[", "]", "for", "element", "in", "elements_copy", ":", "if", "any", "(", "[", "c", ".", "lower", "(", ")", "in", "element", ".", "full_text", ".", "lower", "(", ")", "for", "c", "in", "containing", "]", ")", ":", "elements", ".", "append", "(", "element", ")", "elements", ".", "reverse", "(", ")", "# Sanitize the found HTML.", "if", "clean", ":", "elements_copy", "=", "elements", ".", "copy", "(", ")", "elements", "=", "[", "]", "for", "element", "in", "elements_copy", ":", "element", ".", "raw_html", "=", "lxml_html_tostring", "(", "cleaner", ".", "clean_html", "(", "element", ".", "lxml", ")", ")", "elements", ".", "append", "(", "element", ")", "return", "_get_first_or_list", "(", "elements", ",", "first", ")" ]
Given a CSS Selector, returns a list of :class:`Element <Element>` objects or a single one. :param selector: CSS Selector to use. :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags. :param containing: If specified, only return elements that contain the provided text. :param first: Whether or not to return just the first result. :param _encoding: The encoding format. Example CSS Selectors: - ``a`` - ``a.someClass`` - ``a#someID`` - ``a[target=_blank]`` See W3School's `CSS Selectors Reference <https://www.w3schools.com/cssref/css_selectors.asp>`_ for more details. If ``first`` is ``True``, only returns the first :class:`Element <Element>` found.
[ "Given", "a", "CSS", "Selector", "returns", "a", "list", "of", ":", "class", ":", "Element", "<Element", ">", "objects", "or", "a", "single", "one", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L180-L234
train
kennethreitz/requests-html
requests_html.py
BaseParser.xpath
def xpath(self, selector: str, *, clean: bool = False, first: bool = False, _encoding: str = None) -> _XPath: """Given an XPath selector, returns a list of :class:`Element <Element>` objects or a single one. :param selector: XPath Selector to use. :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags. :param first: Whether or not to return just the first result. :param _encoding: The encoding format. If a sub-selector is specified (e.g. ``//a/@href``), a simple list of results is returned. See W3School's `XPath Examples <https://www.w3schools.com/xml/xpath_examples.asp>`_ for more details. If ``first`` is ``True``, only returns the first :class:`Element <Element>` found. """ selected = self.lxml.xpath(selector) elements = [ Element(element=selection, url=self.url, default_encoding=_encoding or self.encoding) if not isinstance(selection, etree._ElementUnicodeResult) else str(selection) for selection in selected ] # Sanitize the found HTML. if clean: elements_copy = elements.copy() elements = [] for element in elements_copy: element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml)) elements.append(element) return _get_first_or_list(elements, first)
python
def xpath(self, selector: str, *, clean: bool = False, first: bool = False, _encoding: str = None) -> _XPath: """Given an XPath selector, returns a list of :class:`Element <Element>` objects or a single one. :param selector: XPath Selector to use. :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags. :param first: Whether or not to return just the first result. :param _encoding: The encoding format. If a sub-selector is specified (e.g. ``//a/@href``), a simple list of results is returned. See W3School's `XPath Examples <https://www.w3schools.com/xml/xpath_examples.asp>`_ for more details. If ``first`` is ``True``, only returns the first :class:`Element <Element>` found. """ selected = self.lxml.xpath(selector) elements = [ Element(element=selection, url=self.url, default_encoding=_encoding or self.encoding) if not isinstance(selection, etree._ElementUnicodeResult) else str(selection) for selection in selected ] # Sanitize the found HTML. if clean: elements_copy = elements.copy() elements = [] for element in elements_copy: element.raw_html = lxml_html_tostring(cleaner.clean_html(element.lxml)) elements.append(element) return _get_first_or_list(elements, first)
[ "def", "xpath", "(", "self", ",", "selector", ":", "str", ",", "*", ",", "clean", ":", "bool", "=", "False", ",", "first", ":", "bool", "=", "False", ",", "_encoding", ":", "str", "=", "None", ")", "->", "_XPath", ":", "selected", "=", "self", ".", "lxml", ".", "xpath", "(", "selector", ")", "elements", "=", "[", "Element", "(", "element", "=", "selection", ",", "url", "=", "self", ".", "url", ",", "default_encoding", "=", "_encoding", "or", "self", ".", "encoding", ")", "if", "not", "isinstance", "(", "selection", ",", "etree", ".", "_ElementUnicodeResult", ")", "else", "str", "(", "selection", ")", "for", "selection", "in", "selected", "]", "# Sanitize the found HTML.", "if", "clean", ":", "elements_copy", "=", "elements", ".", "copy", "(", ")", "elements", "=", "[", "]", "for", "element", "in", "elements_copy", ":", "element", ".", "raw_html", "=", "lxml_html_tostring", "(", "cleaner", ".", "clean_html", "(", "element", ".", "lxml", ")", ")", "elements", ".", "append", "(", "element", ")", "return", "_get_first_or_list", "(", "elements", ",", "first", ")" ]
Given an XPath selector, returns a list of :class:`Element <Element>` objects or a single one. :param selector: XPath Selector to use. :param clean: Whether or not to sanitize the found HTML of ``<script>`` and ``<style>`` tags. :param first: Whether or not to return just the first result. :param _encoding: The encoding format. If a sub-selector is specified (e.g. ``//a/@href``), a simple list of results is returned. See W3School's `XPath Examples <https://www.w3schools.com/xml/xpath_examples.asp>`_ for more details. If ``first`` is ``True``, only returns the first :class:`Element <Element>` found.
[ "Given", "an", "XPath", "selector", "returns", "a", "list", "of", ":", "class", ":", "Element", "<Element", ">", "objects", "or", "a", "single", "one", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L236-L272
train
kennethreitz/requests-html
requests_html.py
BaseParser.search_all
def search_all(self, template: str) -> _Result: """Search the :class:`Element <Element>` (multiple times) for the given parse template. :param template: The Parse template to use. """ return [r for r in findall(template, self.html)]
python
def search_all(self, template: str) -> _Result: """Search the :class:`Element <Element>` (multiple times) for the given parse template. :param template: The Parse template to use. """ return [r for r in findall(template, self.html)]
[ "def", "search_all", "(", "self", ",", "template", ":", "str", ")", "->", "_Result", ":", "return", "[", "r", "for", "r", "in", "findall", "(", "template", ",", "self", ".", "html", ")", "]" ]
Search the :class:`Element <Element>` (multiple times) for the given parse template. :param template: The Parse template to use.
[ "Search", "the", ":", "class", ":", "Element", "<Element", ">", "(", "multiple", "times", ")", "for", "the", "given", "parse", "template", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L282-L288
train
kennethreitz/requests-html
requests_html.py
BaseParser.links
def links(self) -> _Links: """All found links on page, in as–is form.""" def gen(): for link in self.find('a'): try: href = link.attrs['href'].strip() if href and not (href.startswith('#') and self.skip_anchors) and not href.startswith(('javascript:', 'mailto:')): yield href except KeyError: pass return set(gen())
python
def links(self) -> _Links: """All found links on page, in as–is form.""" def gen(): for link in self.find('a'): try: href = link.attrs['href'].strip() if href and not (href.startswith('#') and self.skip_anchors) and not href.startswith(('javascript:', 'mailto:')): yield href except KeyError: pass return set(gen())
[ "def", "links", "(", "self", ")", "->", "_Links", ":", "def", "gen", "(", ")", ":", "for", "link", "in", "self", ".", "find", "(", "'a'", ")", ":", "try", ":", "href", "=", "link", ".", "attrs", "[", "'href'", "]", ".", "strip", "(", ")", "if", "href", "and", "not", "(", "href", ".", "startswith", "(", "'#'", ")", "and", "self", ".", "skip_anchors", ")", "and", "not", "href", ".", "startswith", "(", "(", "'javascript:'", ",", "'mailto:'", ")", ")", ":", "yield", "href", "except", "KeyError", ":", "pass", "return", "set", "(", "gen", "(", ")", ")" ]
All found links on page, in as–is form.
[ "All", "found", "links", "on", "page", "in", "as–is", "form", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L291-L304
train
kennethreitz/requests-html
requests_html.py
BaseParser._make_absolute
def _make_absolute(self, link): """Makes a given link absolute.""" # Parse the link with stdlib. parsed = urlparse(link)._asdict() # If link is relative, then join it with base_url. if not parsed['netloc']: return urljoin(self.base_url, link) # Link is absolute; if it lacks a scheme, add one from base_url. if not parsed['scheme']: parsed['scheme'] = urlparse(self.base_url).scheme # Reconstruct the URL to incorporate the new scheme. parsed = (v for v in parsed.values()) return urlunparse(parsed) # Link is absolute and complete with scheme; nothing to be done here. return link
python
def _make_absolute(self, link): """Makes a given link absolute.""" # Parse the link with stdlib. parsed = urlparse(link)._asdict() # If link is relative, then join it with base_url. if not parsed['netloc']: return urljoin(self.base_url, link) # Link is absolute; if it lacks a scheme, add one from base_url. if not parsed['scheme']: parsed['scheme'] = urlparse(self.base_url).scheme # Reconstruct the URL to incorporate the new scheme. parsed = (v for v in parsed.values()) return urlunparse(parsed) # Link is absolute and complete with scheme; nothing to be done here. return link
[ "def", "_make_absolute", "(", "self", ",", "link", ")", ":", "# Parse the link with stdlib.", "parsed", "=", "urlparse", "(", "link", ")", ".", "_asdict", "(", ")", "# If link is relative, then join it with base_url.", "if", "not", "parsed", "[", "'netloc'", "]", ":", "return", "urljoin", "(", "self", ".", "base_url", ",", "link", ")", "# Link is absolute; if it lacks a scheme, add one from base_url.", "if", "not", "parsed", "[", "'scheme'", "]", ":", "parsed", "[", "'scheme'", "]", "=", "urlparse", "(", "self", ".", "base_url", ")", ".", "scheme", "# Reconstruct the URL to incorporate the new scheme.", "parsed", "=", "(", "v", "for", "v", "in", "parsed", ".", "values", "(", ")", ")", "return", "urlunparse", "(", "parsed", ")", "# Link is absolute and complete with scheme; nothing to be done here.", "return", "link" ]
Makes a given link absolute.
[ "Makes", "a", "given", "link", "absolute", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L306-L325
train
kennethreitz/requests-html
requests_html.py
BaseParser.absolute_links
def absolute_links(self) -> _Links: """All found links on page, in absolute form (`learn more <https://www.navegabem.com/absolute-or-relative-links.html>`_). """ def gen(): for link in self.links: yield self._make_absolute(link) return set(gen())
python
def absolute_links(self) -> _Links: """All found links on page, in absolute form (`learn more <https://www.navegabem.com/absolute-or-relative-links.html>`_). """ def gen(): for link in self.links: yield self._make_absolute(link) return set(gen())
[ "def", "absolute_links", "(", "self", ")", "->", "_Links", ":", "def", "gen", "(", ")", ":", "for", "link", "in", "self", ".", "links", ":", "yield", "self", ".", "_make_absolute", "(", "link", ")", "return", "set", "(", "gen", "(", ")", ")" ]
All found links on page, in absolute form (`learn more <https://www.navegabem.com/absolute-or-relative-links.html>`_).
[ "All", "found", "links", "on", "page", "in", "absolute", "form", "(", "learn", "more", "<https", ":", "//", "www", ".", "navegabem", ".", "com", "/", "absolute", "-", "or", "-", "relative", "-", "links", ".", "html", ">", "_", ")", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L329-L338
train
kennethreitz/requests-html
requests_html.py
BaseParser.base_url
def base_url(self) -> _URL: """The base URL for the page. Supports the ``<base>`` tag (`learn more <https://www.w3schools.com/tags/tag_base.asp>`_).""" # Support for <base> tag. base = self.find('base', first=True) if base: result = base.attrs.get('href', '').strip() if result: return result # Parse the url to separate out the path parsed = urlparse(self.url)._asdict() # Remove any part of the path after the last '/' parsed['path'] = '/'.join(parsed['path'].split('/')[:-1]) + '/' # Reconstruct the url with the modified path parsed = (v for v in parsed.values()) url = urlunparse(parsed) return url
python
def base_url(self) -> _URL: """The base URL for the page. Supports the ``<base>`` tag (`learn more <https://www.w3schools.com/tags/tag_base.asp>`_).""" # Support for <base> tag. base = self.find('base', first=True) if base: result = base.attrs.get('href', '').strip() if result: return result # Parse the url to separate out the path parsed = urlparse(self.url)._asdict() # Remove any part of the path after the last '/' parsed['path'] = '/'.join(parsed['path'].split('/')[:-1]) + '/' # Reconstruct the url with the modified path parsed = (v for v in parsed.values()) url = urlunparse(parsed) return url
[ "def", "base_url", "(", "self", ")", "->", "_URL", ":", "# Support for <base> tag.", "base", "=", "self", ".", "find", "(", "'base'", ",", "first", "=", "True", ")", "if", "base", ":", "result", "=", "base", ".", "attrs", ".", "get", "(", "'href'", ",", "''", ")", ".", "strip", "(", ")", "if", "result", ":", "return", "result", "# Parse the url to separate out the path", "parsed", "=", "urlparse", "(", "self", ".", "url", ")", ".", "_asdict", "(", ")", "# Remove any part of the path after the last '/'", "parsed", "[", "'path'", "]", "=", "'/'", ".", "join", "(", "parsed", "[", "'path'", "]", ".", "split", "(", "'/'", ")", "[", ":", "-", "1", "]", ")", "+", "'/'", "# Reconstruct the url with the modified path", "parsed", "=", "(", "v", "for", "v", "in", "parsed", ".", "values", "(", ")", ")", "url", "=", "urlunparse", "(", "parsed", ")", "return", "url" ]
The base URL for the page. Supports the ``<base>`` tag (`learn more <https://www.w3schools.com/tags/tag_base.asp>`_).
[ "The", "base", "URL", "for", "the", "page", ".", "Supports", "the", "<base", ">", "tag", "(", "learn", "more", "<https", ":", "//", "www", ".", "w3schools", ".", "com", "/", "tags", "/", "tag_base", ".", "asp", ">", "_", ")", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L341-L362
train
kennethreitz/requests-html
requests_html.py
Element.attrs
def attrs(self) -> _Attrs: """Returns a dictionary of the attributes of the :class:`Element <Element>` (`learn more <https://www.w3schools.com/tags/ref_attributes.asp>`_). """ if self._attrs is None: self._attrs = {k: v for k, v in self.element.items()} # Split class and rel up, as there are ussually many of them: for attr in ['class', 'rel']: if attr in self._attrs: self._attrs[attr] = tuple(self._attrs[attr].split()) return self._attrs
python
def attrs(self) -> _Attrs: """Returns a dictionary of the attributes of the :class:`Element <Element>` (`learn more <https://www.w3schools.com/tags/ref_attributes.asp>`_). """ if self._attrs is None: self._attrs = {k: v for k, v in self.element.items()} # Split class and rel up, as there are ussually many of them: for attr in ['class', 'rel']: if attr in self._attrs: self._attrs[attr] = tuple(self._attrs[attr].split()) return self._attrs
[ "def", "attrs", "(", "self", ")", "->", "_Attrs", ":", "if", "self", ".", "_attrs", "is", "None", ":", "self", ".", "_attrs", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "self", ".", "element", ".", "items", "(", ")", "}", "# Split class and rel up, as there are ussually many of them:", "for", "attr", "in", "[", "'class'", ",", "'rel'", "]", ":", "if", "attr", "in", "self", ".", "_attrs", ":", "self", ".", "_attrs", "[", "attr", "]", "=", "tuple", "(", "self", ".", "_attrs", "[", "attr", "]", ".", "split", "(", ")", ")", "return", "self", ".", "_attrs" ]
Returns a dictionary of the attributes of the :class:`Element <Element>` (`learn more <https://www.w3schools.com/tags/ref_attributes.asp>`_).
[ "Returns", "a", "dictionary", "of", "the", "attributes", "of", "the", ":", "class", ":", "Element", "<Element", ">", "(", "learn", "more", "<https", ":", "//", "www", ".", "w3schools", ".", "com", "/", "tags", "/", "ref_attributes", ".", "asp", ">", "_", ")", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L390-L402
train
kennethreitz/requests-html
requests_html.py
HTML.next
def next(self, fetch: bool = False, next_symbol: _NextSymbol = DEFAULT_NEXT_SYMBOL) -> _Next: """Attempts to find the next page, if there is one. If ``fetch`` is ``True`` (default), returns :class:`HTML <HTML>` object of next page. If ``fetch`` is ``False``, simply returns the next URL. """ def get_next(): candidates = self.find('a', containing=next_symbol) for candidate in candidates: if candidate.attrs.get('href'): # Support 'next' rel (e.g. reddit). if 'next' in candidate.attrs.get('rel', []): return candidate.attrs['href'] # Support 'next' in classnames. for _class in candidate.attrs.get('class', []): if 'next' in _class: return candidate.attrs['href'] if 'page' in candidate.attrs['href']: return candidate.attrs['href'] try: # Resort to the last candidate. return candidates[-1].attrs['href'] except IndexError: return None __next = get_next() if __next: url = self._make_absolute(__next) else: return None if fetch: return self.session.get(url) else: return url
python
def next(self, fetch: bool = False, next_symbol: _NextSymbol = DEFAULT_NEXT_SYMBOL) -> _Next: """Attempts to find the next page, if there is one. If ``fetch`` is ``True`` (default), returns :class:`HTML <HTML>` object of next page. If ``fetch`` is ``False``, simply returns the next URL. """ def get_next(): candidates = self.find('a', containing=next_symbol) for candidate in candidates: if candidate.attrs.get('href'): # Support 'next' rel (e.g. reddit). if 'next' in candidate.attrs.get('rel', []): return candidate.attrs['href'] # Support 'next' in classnames. for _class in candidate.attrs.get('class', []): if 'next' in _class: return candidate.attrs['href'] if 'page' in candidate.attrs['href']: return candidate.attrs['href'] try: # Resort to the last candidate. return candidates[-1].attrs['href'] except IndexError: return None __next = get_next() if __next: url = self._make_absolute(__next) else: return None if fetch: return self.session.get(url) else: return url
[ "def", "next", "(", "self", ",", "fetch", ":", "bool", "=", "False", ",", "next_symbol", ":", "_NextSymbol", "=", "DEFAULT_NEXT_SYMBOL", ")", "->", "_Next", ":", "def", "get_next", "(", ")", ":", "candidates", "=", "self", ".", "find", "(", "'a'", ",", "containing", "=", "next_symbol", ")", "for", "candidate", "in", "candidates", ":", "if", "candidate", ".", "attrs", ".", "get", "(", "'href'", ")", ":", "# Support 'next' rel (e.g. reddit).", "if", "'next'", "in", "candidate", ".", "attrs", ".", "get", "(", "'rel'", ",", "[", "]", ")", ":", "return", "candidate", ".", "attrs", "[", "'href'", "]", "# Support 'next' in classnames.", "for", "_class", "in", "candidate", ".", "attrs", ".", "get", "(", "'class'", ",", "[", "]", ")", ":", "if", "'next'", "in", "_class", ":", "return", "candidate", ".", "attrs", "[", "'href'", "]", "if", "'page'", "in", "candidate", ".", "attrs", "[", "'href'", "]", ":", "return", "candidate", ".", "attrs", "[", "'href'", "]", "try", ":", "# Resort to the last candidate.", "return", "candidates", "[", "-", "1", "]", ".", "attrs", "[", "'href'", "]", "except", "IndexError", ":", "return", "None", "__next", "=", "get_next", "(", ")", "if", "__next", ":", "url", "=", "self", ".", "_make_absolute", "(", "__next", ")", "else", ":", "return", "None", "if", "fetch", ":", "return", "self", ".", "session", ".", "get", "(", "url", ")", "else", ":", "return", "url" ]
Attempts to find the next page, if there is one. If ``fetch`` is ``True`` (default), returns :class:`HTML <HTML>` object of next page. If ``fetch`` is ``False``, simply returns the next URL.
[ "Attempts", "to", "find", "the", "next", "page", "if", "there", "is", "one", ".", "If", "fetch", "is", "True", "(", "default", ")", "returns", ":", "class", ":", "HTML", "<HTML", ">", "object", "of", "next", "page", ".", "If", "fetch", "is", "False", "simply", "returns", "the", "next", "URL", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L433-L472
train
kennethreitz/requests-html
requests_html.py
HTML._async_render
async def _async_render(self, *, url: str, script: str = None, scrolldown, sleep: int, wait: float, reload, content: Optional[str], timeout: Union[float, int], keep_page: bool): """ Handle page creation and js rendering. Internal use for render/arender methods. """ try: page = await self.browser.newPage() # Wait before rendering the page, to prevent timeouts. await asyncio.sleep(wait) # Load the given page (GET request, obviously.) if reload: await page.goto(url, options={'timeout': int(timeout * 1000)}) else: await page.goto(f'data:text/html,{self.html}', options={'timeout': int(timeout * 1000)}) result = None if script: result = await page.evaluate(script) if scrolldown: for _ in range(scrolldown): await page._keyboard.down('PageDown') await asyncio.sleep(sleep) else: await asyncio.sleep(sleep) if scrolldown: await page._keyboard.up('PageDown') # Return the content of the page, JavaScript evaluated. content = await page.content() if not keep_page: await page.close() page = None return content, result, page except TimeoutError: await page.close() page = None return None
python
async def _async_render(self, *, url: str, script: str = None, scrolldown, sleep: int, wait: float, reload, content: Optional[str], timeout: Union[float, int], keep_page: bool): """ Handle page creation and js rendering. Internal use for render/arender methods. """ try: page = await self.browser.newPage() # Wait before rendering the page, to prevent timeouts. await asyncio.sleep(wait) # Load the given page (GET request, obviously.) if reload: await page.goto(url, options={'timeout': int(timeout * 1000)}) else: await page.goto(f'data:text/html,{self.html}', options={'timeout': int(timeout * 1000)}) result = None if script: result = await page.evaluate(script) if scrolldown: for _ in range(scrolldown): await page._keyboard.down('PageDown') await asyncio.sleep(sleep) else: await asyncio.sleep(sleep) if scrolldown: await page._keyboard.up('PageDown') # Return the content of the page, JavaScript evaluated. content = await page.content() if not keep_page: await page.close() page = None return content, result, page except TimeoutError: await page.close() page = None return None
[ "async", "def", "_async_render", "(", "self", ",", "*", ",", "url", ":", "str", ",", "script", ":", "str", "=", "None", ",", "scrolldown", ",", "sleep", ":", "int", ",", "wait", ":", "float", ",", "reload", ",", "content", ":", "Optional", "[", "str", "]", ",", "timeout", ":", "Union", "[", "float", ",", "int", "]", ",", "keep_page", ":", "bool", ")", ":", "try", ":", "page", "=", "await", "self", ".", "browser", ".", "newPage", "(", ")", "# Wait before rendering the page, to prevent timeouts.", "await", "asyncio", ".", "sleep", "(", "wait", ")", "# Load the given page (GET request, obviously.)", "if", "reload", ":", "await", "page", ".", "goto", "(", "url", ",", "options", "=", "{", "'timeout'", ":", "int", "(", "timeout", "*", "1000", ")", "}", ")", "else", ":", "await", "page", ".", "goto", "(", "f'data:text/html,{self.html}'", ",", "options", "=", "{", "'timeout'", ":", "int", "(", "timeout", "*", "1000", ")", "}", ")", "result", "=", "None", "if", "script", ":", "result", "=", "await", "page", ".", "evaluate", "(", "script", ")", "if", "scrolldown", ":", "for", "_", "in", "range", "(", "scrolldown", ")", ":", "await", "page", ".", "_keyboard", ".", "down", "(", "'PageDown'", ")", "await", "asyncio", ".", "sleep", "(", "sleep", ")", "else", ":", "await", "asyncio", ".", "sleep", "(", "sleep", ")", "if", "scrolldown", ":", "await", "page", ".", "_keyboard", ".", "up", "(", "'PageDown'", ")", "# Return the content of the page, JavaScript evaluated.", "content", "=", "await", "page", ".", "content", "(", ")", "if", "not", "keep_page", ":", "await", "page", ".", "close", "(", ")", "page", "=", "None", "return", "content", ",", "result", ",", "page", "except", "TimeoutError", ":", "await", "page", ".", "close", "(", ")", "page", "=", "None", "return", "None" ]
Handle page creation and js rendering. Internal use for render/arender methods.
[ "Handle", "page", "creation", "and", "js", "rendering", ".", "Internal", "use", "for", "render", "/", "arender", "methods", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L502-L539
train
kennethreitz/requests-html
requests_html.py
HTML.render
def render(self, retries: int = 8, script: str = None, wait: float = 0.2, scrolldown=False, sleep: int = 0, reload: bool = True, timeout: Union[float, int] = 8.0, keep_page: bool = False): """Reloads the response in Chromium, and replaces HTML content with an updated version, with JavaScript executed. :param retries: The number of times to retry loading the page in Chromium. :param script: JavaScript to execute upon page load (optional). :param wait: The number of seconds to wait before loading the page, preventing timeouts (optional). :param scrolldown: Integer, if provided, of how many times to page down. :param sleep: Integer, if provided, of how many long to sleep after initial render. :param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory. :param keep_page: If ``True`` will allow you to interact with the browser page through ``r.html.page``. If ``scrolldown`` is specified, the page will scrolldown the specified number of times, after sleeping the specified amount of time (e.g. ``scrolldown=10, sleep=1``). If just ``sleep`` is provided, the rendering will wait *n* seconds, before returning. If ``script`` is specified, it will execute the provided JavaScript at runtime. Example: .. code-block:: python script = \"\"\" () => { return { width: document.documentElement.clientWidth, height: document.documentElement.clientHeight, deviceScaleFactor: window.devicePixelRatio, } } \"\"\" Returns the return value of the executed ``script``, if any is provided: .. code-block:: python >>> r.html.render(script=script) {'width': 800, 'height': 600, 'deviceScaleFactor': 1} Warning: the first time you run this method, it will download Chromium into your home directory (``~/.pyppeteer``). """ self.browser = self.session.browser # Automatically create a event loop and browser content = None # Automatically set Reload to False, if example URL is being used. 
if self.url == DEFAULT_URL: reload = False for i in range(retries): if not content: try: content, result, page = self.session.loop.run_until_complete(self._async_render(url=self.url, script=script, sleep=sleep, wait=wait, content=self.html, reload=reload, scrolldown=scrolldown, timeout=timeout, keep_page=keep_page)) except TypeError: pass else: break if not content: raise MaxRetries("Unable to render the page. Try increasing timeout") html = HTML(url=self.url, html=content.encode(DEFAULT_ENCODING), default_encoding=DEFAULT_ENCODING) self.__dict__.update(html.__dict__) self.page = page return result
python
def render(self, retries: int = 8, script: str = None, wait: float = 0.2, scrolldown=False, sleep: int = 0, reload: bool = True, timeout: Union[float, int] = 8.0, keep_page: bool = False): """Reloads the response in Chromium, and replaces HTML content with an updated version, with JavaScript executed. :param retries: The number of times to retry loading the page in Chromium. :param script: JavaScript to execute upon page load (optional). :param wait: The number of seconds to wait before loading the page, preventing timeouts (optional). :param scrolldown: Integer, if provided, of how many times to page down. :param sleep: Integer, if provided, of how many long to sleep after initial render. :param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory. :param keep_page: If ``True`` will allow you to interact with the browser page through ``r.html.page``. If ``scrolldown`` is specified, the page will scrolldown the specified number of times, after sleeping the specified amount of time (e.g. ``scrolldown=10, sleep=1``). If just ``sleep`` is provided, the rendering will wait *n* seconds, before returning. If ``script`` is specified, it will execute the provided JavaScript at runtime. Example: .. code-block:: python script = \"\"\" () => { return { width: document.documentElement.clientWidth, height: document.documentElement.clientHeight, deviceScaleFactor: window.devicePixelRatio, } } \"\"\" Returns the return value of the executed ``script``, if any is provided: .. code-block:: python >>> r.html.render(script=script) {'width': 800, 'height': 600, 'deviceScaleFactor': 1} Warning: the first time you run this method, it will download Chromium into your home directory (``~/.pyppeteer``). """ self.browser = self.session.browser # Automatically create a event loop and browser content = None # Automatically set Reload to False, if example URL is being used. 
if self.url == DEFAULT_URL: reload = False for i in range(retries): if not content: try: content, result, page = self.session.loop.run_until_complete(self._async_render(url=self.url, script=script, sleep=sleep, wait=wait, content=self.html, reload=reload, scrolldown=scrolldown, timeout=timeout, keep_page=keep_page)) except TypeError: pass else: break if not content: raise MaxRetries("Unable to render the page. Try increasing timeout") html = HTML(url=self.url, html=content.encode(DEFAULT_ENCODING), default_encoding=DEFAULT_ENCODING) self.__dict__.update(html.__dict__) self.page = page return result
[ "def", "render", "(", "self", ",", "retries", ":", "int", "=", "8", ",", "script", ":", "str", "=", "None", ",", "wait", ":", "float", "=", "0.2", ",", "scrolldown", "=", "False", ",", "sleep", ":", "int", "=", "0", ",", "reload", ":", "bool", "=", "True", ",", "timeout", ":", "Union", "[", "float", ",", "int", "]", "=", "8.0", ",", "keep_page", ":", "bool", "=", "False", ")", ":", "self", ".", "browser", "=", "self", ".", "session", ".", "browser", "# Automatically create a event loop and browser", "content", "=", "None", "# Automatically set Reload to False, if example URL is being used.", "if", "self", ".", "url", "==", "DEFAULT_URL", ":", "reload", "=", "False", "for", "i", "in", "range", "(", "retries", ")", ":", "if", "not", "content", ":", "try", ":", "content", ",", "result", ",", "page", "=", "self", ".", "session", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_async_render", "(", "url", "=", "self", ".", "url", ",", "script", "=", "script", ",", "sleep", "=", "sleep", ",", "wait", "=", "wait", ",", "content", "=", "self", ".", "html", ",", "reload", "=", "reload", ",", "scrolldown", "=", "scrolldown", ",", "timeout", "=", "timeout", ",", "keep_page", "=", "keep_page", ")", ")", "except", "TypeError", ":", "pass", "else", ":", "break", "if", "not", "content", ":", "raise", "MaxRetries", "(", "\"Unable to render the page. Try increasing timeout\"", ")", "html", "=", "HTML", "(", "url", "=", "self", ".", "url", ",", "html", "=", "content", ".", "encode", "(", "DEFAULT_ENCODING", ")", ",", "default_encoding", "=", "DEFAULT_ENCODING", ")", "self", ".", "__dict__", ".", "update", "(", "html", ".", "__dict__", ")", "self", ".", "page", "=", "page", "return", "result" ]
Reloads the response in Chromium, and replaces HTML content with an updated version, with JavaScript executed. :param retries: The number of times to retry loading the page in Chromium. :param script: JavaScript to execute upon page load (optional). :param wait: The number of seconds to wait before loading the page, preventing timeouts (optional). :param scrolldown: Integer, if provided, of how many times to page down. :param sleep: Integer, if provided, of how many long to sleep after initial render. :param reload: If ``False``, content will not be loaded from the browser, but will be provided from memory. :param keep_page: If ``True`` will allow you to interact with the browser page through ``r.html.page``. If ``scrolldown`` is specified, the page will scrolldown the specified number of times, after sleeping the specified amount of time (e.g. ``scrolldown=10, sleep=1``). If just ``sleep`` is provided, the rendering will wait *n* seconds, before returning. If ``script`` is specified, it will execute the provided JavaScript at runtime. Example: .. code-block:: python script = \"\"\" () => { return { width: document.documentElement.clientWidth, height: document.documentElement.clientHeight, deviceScaleFactor: window.devicePixelRatio, } } \"\"\" Returns the return value of the executed ``script``, if any is provided: .. code-block:: python >>> r.html.render(script=script) {'width': 800, 'height': 600, 'deviceScaleFactor': 1} Warning: the first time you run this method, it will download Chromium into your home directory (``~/.pyppeteer``).
[ "Reloads", "the", "response", "in", "Chromium", "and", "replaces", "HTML", "content", "with", "an", "updated", "version", "with", "JavaScript", "executed", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L541-L610
train
kennethreitz/requests-html
requests_html.py
BaseSession.response_hook
def response_hook(self, response, **kwargs) -> HTMLResponse: """ Change response enconding and replace it by a HTMLResponse. """ if not response.encoding: response.encoding = DEFAULT_ENCODING return HTMLResponse._from_response(response, self)
python
def response_hook(self, response, **kwargs) -> HTMLResponse: """ Change response enconding and replace it by a HTMLResponse. """ if not response.encoding: response.encoding = DEFAULT_ENCODING return HTMLResponse._from_response(response, self)
[ "def", "response_hook", "(", "self", ",", "response", ",", "*", "*", "kwargs", ")", "->", "HTMLResponse", ":", "if", "not", "response", ".", "encoding", ":", "response", ".", "encoding", "=", "DEFAULT_ENCODING", "return", "HTMLResponse", ".", "_from_response", "(", "response", ",", "self", ")" ]
Change response enconding and replace it by a HTMLResponse.
[ "Change", "response", "enconding", "and", "replace", "it", "by", "a", "HTMLResponse", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L705-L709
train
kennethreitz/requests-html
requests_html.py
HTMLSession.close
def close(self): """ If a browser was created close it first. """ if hasattr(self, "_browser"): self.loop.run_until_complete(self._browser.close()) super().close()
python
def close(self): """ If a browser was created close it first. """ if hasattr(self, "_browser"): self.loop.run_until_complete(self._browser.close()) super().close()
[ "def", "close", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "\"_browser\"", ")", ":", "self", ".", "loop", ".", "run_until_complete", "(", "self", ".", "_browser", ".", "close", "(", ")", ")", "super", "(", ")", ".", "close", "(", ")" ]
If a browser was created close it first.
[ "If", "a", "browser", "was", "created", "close", "it", "first", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L733-L737
train
kennethreitz/requests-html
requests_html.py
AsyncHTMLSession.request
def request(self, *args, **kwargs): """ Partial original request func and run it in a thread. """ func = partial(super().request, *args, **kwargs) return self.loop.run_in_executor(self.thread_pool, func)
python
def request(self, *args, **kwargs): """ Partial original request func and run it in a thread. """ func = partial(super().request, *args, **kwargs) return self.loop.run_in_executor(self.thread_pool, func)
[ "def", "request", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "func", "=", "partial", "(", "super", "(", ")", ".", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self", ".", "loop", ".", "run_in_executor", "(", "self", ".", "thread_pool", ",", "func", ")" ]
Partial original request func and run it in a thread.
[ "Partial", "original", "request", "func", "and", "run", "it", "in", "a", "thread", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L756-L759
train
kennethreitz/requests-html
requests_html.py
AsyncHTMLSession.run
def run(self, *coros): """ Pass in all the coroutines you want to run, it will wrap each one in a task, run it and wait for the result. Return a list with all results, this is returned in the same order coros are passed in. """ tasks = [ asyncio.ensure_future(coro()) for coro in coros ] done, _ = self.loop.run_until_complete(asyncio.wait(tasks)) return [t.result() for t in done]
python
def run(self, *coros): """ Pass in all the coroutines you want to run, it will wrap each one in a task, run it and wait for the result. Return a list with all results, this is returned in the same order coros are passed in. """ tasks = [ asyncio.ensure_future(coro()) for coro in coros ] done, _ = self.loop.run_until_complete(asyncio.wait(tasks)) return [t.result() for t in done]
[ "def", "run", "(", "self", ",", "*", "coros", ")", ":", "tasks", "=", "[", "asyncio", ".", "ensure_future", "(", "coro", "(", ")", ")", "for", "coro", "in", "coros", "]", "done", ",", "_", "=", "self", ".", "loop", ".", "run_until_complete", "(", "asyncio", ".", "wait", "(", "tasks", ")", ")", "return", "[", "t", ".", "result", "(", ")", "for", "t", "in", "done", "]" ]
Pass in all the coroutines you want to run, it will wrap each one in a task, run it and wait for the result. Return a list with all results, this is returned in the same order coros are passed in.
[ "Pass", "in", "all", "the", "coroutines", "you", "want", "to", "run", "it", "will", "wrap", "each", "one", "in", "a", "task", "run", "it", "and", "wait", "for", "the", "result", ".", "Return", "a", "list", "with", "all", "results", "this", "is", "returned", "in", "the", "same", "order", "coros", "are", "passed", "in", "." ]
b59a9f2fb9333d7d467154a0fd82978efdb9d23b
https://github.com/kennethreitz/requests-html/blob/b59a9f2fb9333d7d467154a0fd82978efdb9d23b/requests_html.py#L767-L775
train
Microsoft/nni
examples/trials/kaggle-tgs-salt/loader.py
add_depth_channel
def add_depth_channel(img_tensor, pad_mode): ''' img_tensor: N, C, H, W ''' img_tensor[:, 1] = get_depth_tensor(pad_mode) img_tensor[:, 2] = img_tensor[:, 0] * get_depth_tensor(pad_mode)
python
def add_depth_channel(img_tensor, pad_mode): ''' img_tensor: N, C, H, W ''' img_tensor[:, 1] = get_depth_tensor(pad_mode) img_tensor[:, 2] = img_tensor[:, 0] * get_depth_tensor(pad_mode)
[ "def", "add_depth_channel", "(", "img_tensor", ",", "pad_mode", ")", ":", "img_tensor", "[", ":", ",", "1", "]", "=", "get_depth_tensor", "(", "pad_mode", ")", "img_tensor", "[", ":", ",", "2", "]", "=", "img_tensor", "[", ":", ",", "0", "]", "*", "get_depth_tensor", "(", "pad_mode", ")" ]
img_tensor: N, C, H, W
[ "img_tensor", ":", "N", "C", "H", "W" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/loader.py#L256-L261
train
Microsoft/nni
examples/trials/weight_sharing/ga_squad/attention.py
DotAttention.get_pre_compute
def get_pre_compute(self, s): ''' :param s: [src_sequence, batch_size, src_dim] :return: [src_sequence, batch_size. hidden_dim] ''' hidden_dim = self.hidden_dim src_dim = s.get_shape().as_list()[-1] assert src_dim is not None, 'src dim must be defined' W = self._get_var('W', shape=[src_dim, hidden_dim]) b = self._get_var('b', shape=[1, hidden_dim]) return tf.tensordot(s, W, [[2], [0]]) + b
python
def get_pre_compute(self, s): ''' :param s: [src_sequence, batch_size, src_dim] :return: [src_sequence, batch_size. hidden_dim] ''' hidden_dim = self.hidden_dim src_dim = s.get_shape().as_list()[-1] assert src_dim is not None, 'src dim must be defined' W = self._get_var('W', shape=[src_dim, hidden_dim]) b = self._get_var('b', shape=[1, hidden_dim]) return tf.tensordot(s, W, [[2], [0]]) + b
[ "def", "get_pre_compute", "(", "self", ",", "s", ")", ":", "hidden_dim", "=", "self", ".", "hidden_dim", "src_dim", "=", "s", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "[", "-", "1", "]", "assert", "src_dim", "is", "not", "None", ",", "'src dim must be defined'", "W", "=", "self", ".", "_get_var", "(", "'W'", ",", "shape", "=", "[", "src_dim", ",", "hidden_dim", "]", ")", "b", "=", "self", ".", "_get_var", "(", "'b'", ",", "shape", "=", "[", "1", ",", "hidden_dim", "]", ")", "return", "tf", ".", "tensordot", "(", "s", ",", "W", ",", "[", "[", "2", "]", ",", "[", "0", "]", "]", ")", "+", "b" ]
:param s: [src_sequence, batch_size, src_dim] :return: [src_sequence, batch_size. hidden_dim]
[ ":", "param", "s", ":", "[", "src_sequence", "batch_size", "src_dim", "]", ":", "return", ":", "[", "src_sequence", "batch_size", ".", "hidden_dim", "]" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/attention.py#L94-L104
train
Microsoft/nni
examples/trials/weight_sharing/ga_squad/attention.py
DotAttention.get_prob
def get_prob(self, src, tgt, mask, pre_compute, return_logits=False): ''' :param s: [src_sequence_length, batch_size, src_dim] :param h: [batch_size, tgt_dim] or [tgt_sequence_length, batch_size, tgt_dim] :param mask: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_sizse] :param pre_compute: [src_sequence_length, batch_size, hidden_dim] :return: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_size] ''' s_shape = src.get_shape().as_list() h_shape = tgt.get_shape().as_list() src_dim = s_shape[-1] tgt_dim = h_shape[-1] assert src_dim is not None, 'src dimension must be defined' assert tgt_dim is not None, 'tgt dimension must be defined' self._define_params(src_dim, tgt_dim) if len(h_shape) == 2: tgt = tf.expand_dims(tgt, 0) if pre_compute is None: pre_compute = self.get_pre_compute(src) buf0 = pre_compute buf1 = tf.tensordot(tgt, self.var['U'], axes=[[2], [0]]) buf2 = tf.tanh(tf.expand_dims(buf0, 0) + tf.expand_dims(buf1, 1)) if not self.is_vanilla: xh1 = tgt xh2 = tgt s1 = src if self.need_padding: xh1 = tf.tensordot(xh1, self.var['V_t'], 1) xh2 = tf.tensordot(xh2, self.var['S_t'], 1) s1 = tf.tensordot(s1, self.var['V_s'], 1) if not self.is_identity_transform: xh1 = tf.tensordot(xh1, self.var['T'], 1) xh2 = tf.tensordot(xh2, self.var['T'], 1) buf3 = tf.expand_dims(s1, 0) * tf.expand_dims(xh1, 1) buf3 = tf.tanh(tf.tensordot(buf3, self.var['V'], axes=[[3], [0]])) buf = tf.reshape(tf.tanh(buf2 + buf3), shape=tf.shape(buf3)) else: buf = buf2 v = self.var['v'] e = tf.tensordot(buf, v, [[3], [0]]) e = tf.squeeze(e, axis=[3]) tmp = tf.reshape(e + (mask - 1) * 10000.0, shape=tf.shape(e)) prob = tf.nn.softmax(tmp, 1) if len(h_shape) == 2: prob = tf.squeeze(prob, axis=[0]) tmp = tf.squeeze(tmp, axis=[0]) if return_logits: return prob, tmp return prob
python
def get_prob(self, src, tgt, mask, pre_compute, return_logits=False): ''' :param s: [src_sequence_length, batch_size, src_dim] :param h: [batch_size, tgt_dim] or [tgt_sequence_length, batch_size, tgt_dim] :param mask: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_sizse] :param pre_compute: [src_sequence_length, batch_size, hidden_dim] :return: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_size] ''' s_shape = src.get_shape().as_list() h_shape = tgt.get_shape().as_list() src_dim = s_shape[-1] tgt_dim = h_shape[-1] assert src_dim is not None, 'src dimension must be defined' assert tgt_dim is not None, 'tgt dimension must be defined' self._define_params(src_dim, tgt_dim) if len(h_shape) == 2: tgt = tf.expand_dims(tgt, 0) if pre_compute is None: pre_compute = self.get_pre_compute(src) buf0 = pre_compute buf1 = tf.tensordot(tgt, self.var['U'], axes=[[2], [0]]) buf2 = tf.tanh(tf.expand_dims(buf0, 0) + tf.expand_dims(buf1, 1)) if not self.is_vanilla: xh1 = tgt xh2 = tgt s1 = src if self.need_padding: xh1 = tf.tensordot(xh1, self.var['V_t'], 1) xh2 = tf.tensordot(xh2, self.var['S_t'], 1) s1 = tf.tensordot(s1, self.var['V_s'], 1) if not self.is_identity_transform: xh1 = tf.tensordot(xh1, self.var['T'], 1) xh2 = tf.tensordot(xh2, self.var['T'], 1) buf3 = tf.expand_dims(s1, 0) * tf.expand_dims(xh1, 1) buf3 = tf.tanh(tf.tensordot(buf3, self.var['V'], axes=[[3], [0]])) buf = tf.reshape(tf.tanh(buf2 + buf3), shape=tf.shape(buf3)) else: buf = buf2 v = self.var['v'] e = tf.tensordot(buf, v, [[3], [0]]) e = tf.squeeze(e, axis=[3]) tmp = tf.reshape(e + (mask - 1) * 10000.0, shape=tf.shape(e)) prob = tf.nn.softmax(tmp, 1) if len(h_shape) == 2: prob = tf.squeeze(prob, axis=[0]) tmp = tf.squeeze(tmp, axis=[0]) if return_logits: return prob, tmp return prob
[ "def", "get_prob", "(", "self", ",", "src", ",", "tgt", ",", "mask", ",", "pre_compute", ",", "return_logits", "=", "False", ")", ":", "s_shape", "=", "src", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "h_shape", "=", "tgt", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "src_dim", "=", "s_shape", "[", "-", "1", "]", "tgt_dim", "=", "h_shape", "[", "-", "1", "]", "assert", "src_dim", "is", "not", "None", ",", "'src dimension must be defined'", "assert", "tgt_dim", "is", "not", "None", ",", "'tgt dimension must be defined'", "self", ".", "_define_params", "(", "src_dim", ",", "tgt_dim", ")", "if", "len", "(", "h_shape", ")", "==", "2", ":", "tgt", "=", "tf", ".", "expand_dims", "(", "tgt", ",", "0", ")", "if", "pre_compute", "is", "None", ":", "pre_compute", "=", "self", ".", "get_pre_compute", "(", "src", ")", "buf0", "=", "pre_compute", "buf1", "=", "tf", ".", "tensordot", "(", "tgt", ",", "self", ".", "var", "[", "'U'", "]", ",", "axes", "=", "[", "[", "2", "]", ",", "[", "0", "]", "]", ")", "buf2", "=", "tf", ".", "tanh", "(", "tf", ".", "expand_dims", "(", "buf0", ",", "0", ")", "+", "tf", ".", "expand_dims", "(", "buf1", ",", "1", ")", ")", "if", "not", "self", ".", "is_vanilla", ":", "xh1", "=", "tgt", "xh2", "=", "tgt", "s1", "=", "src", "if", "self", ".", "need_padding", ":", "xh1", "=", "tf", ".", "tensordot", "(", "xh1", ",", "self", ".", "var", "[", "'V_t'", "]", ",", "1", ")", "xh2", "=", "tf", ".", "tensordot", "(", "xh2", ",", "self", ".", "var", "[", "'S_t'", "]", ",", "1", ")", "s1", "=", "tf", ".", "tensordot", "(", "s1", ",", "self", ".", "var", "[", "'V_s'", "]", ",", "1", ")", "if", "not", "self", ".", "is_identity_transform", ":", "xh1", "=", "tf", ".", "tensordot", "(", "xh1", ",", "self", ".", "var", "[", "'T'", "]", ",", "1", ")", "xh2", "=", "tf", ".", "tensordot", "(", "xh2", ",", "self", ".", "var", "[", "'T'", "]", ",", "1", ")", "buf3", "=", "tf", ".", "expand_dims", "(", "s1", ",", "0", ")", "*", "tf", ".", 
"expand_dims", "(", "xh1", ",", "1", ")", "buf3", "=", "tf", ".", "tanh", "(", "tf", ".", "tensordot", "(", "buf3", ",", "self", ".", "var", "[", "'V'", "]", ",", "axes", "=", "[", "[", "3", "]", ",", "[", "0", "]", "]", ")", ")", "buf", "=", "tf", ".", "reshape", "(", "tf", ".", "tanh", "(", "buf2", "+", "buf3", ")", ",", "shape", "=", "tf", ".", "shape", "(", "buf3", ")", ")", "else", ":", "buf", "=", "buf2", "v", "=", "self", ".", "var", "[", "'v'", "]", "e", "=", "tf", ".", "tensordot", "(", "buf", ",", "v", ",", "[", "[", "3", "]", ",", "[", "0", "]", "]", ")", "e", "=", "tf", ".", "squeeze", "(", "e", ",", "axis", "=", "[", "3", "]", ")", "tmp", "=", "tf", ".", "reshape", "(", "e", "+", "(", "mask", "-", "1", ")", "*", "10000.0", ",", "shape", "=", "tf", ".", "shape", "(", "e", ")", ")", "prob", "=", "tf", ".", "nn", ".", "softmax", "(", "tmp", ",", "1", ")", "if", "len", "(", "h_shape", ")", "==", "2", ":", "prob", "=", "tf", ".", "squeeze", "(", "prob", ",", "axis", "=", "[", "0", "]", ")", "tmp", "=", "tf", ".", "squeeze", "(", "tmp", ",", "axis", "=", "[", "0", "]", ")", "if", "return_logits", ":", "return", "prob", ",", "tmp", "return", "prob" ]
:param s: [src_sequence_length, batch_size, src_dim] :param h: [batch_size, tgt_dim] or [tgt_sequence_length, batch_size, tgt_dim] :param mask: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_sizse] :param pre_compute: [src_sequence_length, batch_size, hidden_dim] :return: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_size]
[ ":", "param", "s", ":", "[", "src_sequence_length", "batch_size", "src_dim", "]", ":", "param", "h", ":", "[", "batch_size", "tgt_dim", "]", "or", "[", "tgt_sequence_length", "batch_size", "tgt_dim", "]", ":", "param", "mask", ":", "[", "src_sequence_length", "batch_size", "]", "\\", "or", "[", "tgt_sequence_length", "src_sequence_length", "batch_sizse", "]", ":", "param", "pre_compute", ":", "[", "src_sequence_length", "batch_size", "hidden_dim", "]", ":", "return", ":", "[", "src_sequence_length", "batch_size", "]", "\\", "or", "[", "tgt_sequence_length", "src_sequence_length", "batch_size", "]" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/attention.py#L106-L160
train
Microsoft/nni
examples/trials/weight_sharing/ga_squad/attention.py
DotAttention.get_att
def get_att(self, s, prob): ''' :param s: [src_sequence_length, batch_size, src_dim] :param prob: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_size] :return: [batch_size, src_dim] or [tgt_sequence_length, batch_size, src_dim] ''' buf = s * tf.expand_dims(prob, axis=-1) att = tf.reduce_sum(buf, axis=-3) return att
python
def get_att(self, s, prob): ''' :param s: [src_sequence_length, batch_size, src_dim] :param prob: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_size] :return: [batch_size, src_dim] or [tgt_sequence_length, batch_size, src_dim] ''' buf = s * tf.expand_dims(prob, axis=-1) att = tf.reduce_sum(buf, axis=-3) return att
[ "def", "get_att", "(", "self", ",", "s", ",", "prob", ")", ":", "buf", "=", "s", "*", "tf", ".", "expand_dims", "(", "prob", ",", "axis", "=", "-", "1", ")", "att", "=", "tf", ".", "reduce_sum", "(", "buf", ",", "axis", "=", "-", "3", ")", "return", "att" ]
:param s: [src_sequence_length, batch_size, src_dim] :param prob: [src_sequence_length, batch_size]\ or [tgt_sequence_length, src_sequence_length, batch_size] :return: [batch_size, src_dim] or [tgt_sequence_length, batch_size, src_dim]
[ ":", "param", "s", ":", "[", "src_sequence_length", "batch_size", "src_dim", "]", ":", "param", "prob", ":", "[", "src_sequence_length", "batch_size", "]", "\\", "or", "[", "tgt_sequence_length", "src_sequence_length", "batch_size", "]", ":", "return", ":", "[", "batch_size", "src_dim", "]", "or", "[", "tgt_sequence_length", "batch_size", "src_dim", "]" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/attention.py#L162-L171
train
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
shape
def shape(tensor): ''' Get shape of variable. Return type is tuple. ''' temp_s = tensor.get_shape() return tuple([temp_s[i].value for i in range(0, len(temp_s))])
python
def shape(tensor): ''' Get shape of variable. Return type is tuple. ''' temp_s = tensor.get_shape() return tuple([temp_s[i].value for i in range(0, len(temp_s))])
[ "def", "shape", "(", "tensor", ")", ":", "temp_s", "=", "tensor", ".", "get_shape", "(", ")", "return", "tuple", "(", "[", "temp_s", "[", "i", "]", ".", "value", "for", "i", "in", "range", "(", "0", ",", "len", "(", "temp_s", ")", ")", "]", ")" ]
Get shape of variable. Return type is tuple.
[ "Get", "shape", "of", "variable", ".", "Return", "type", "is", "tuple", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L30-L36
train
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
get_variable
def get_variable(name, temp_s): ''' Get variable by name. ''' return tf.Variable(tf.zeros(temp_s), name=name)
python
def get_variable(name, temp_s): ''' Get variable by name. ''' return tf.Variable(tf.zeros(temp_s), name=name)
[ "def", "get_variable", "(", "name", ",", "temp_s", ")", ":", "return", "tf", ".", "Variable", "(", "tf", ".", "zeros", "(", "temp_s", ")", ",", "name", "=", "name", ")" ]
Get variable by name.
[ "Get", "variable", "by", "name", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L39-L43
train
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
dropout
def dropout(tensor, drop_prob, is_training): ''' Dropout except test. ''' if not is_training: return tensor return tf.nn.dropout(tensor, 1.0 - drop_prob)
python
def dropout(tensor, drop_prob, is_training): ''' Dropout except test. ''' if not is_training: return tensor return tf.nn.dropout(tensor, 1.0 - drop_prob)
[ "def", "dropout", "(", "tensor", ",", "drop_prob", ",", "is_training", ")", ":", "if", "not", "is_training", ":", "return", "tensor", "return", "tf", ".", "nn", ".", "dropout", "(", "tensor", ",", "1.0", "-", "drop_prob", ")" ]
Dropout except test.
[ "Dropout", "except", "test", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L46-L52
train
Microsoft/nni
examples/trials/weight_sharing/ga_squad/util.py
Timer.get_elapsed
def get_elapsed(self, restart=True): ''' Calculate time span. ''' end = time.time() span = end - self.__start if restart: self.__start = end return span
python
def get_elapsed(self, restart=True): ''' Calculate time span. ''' end = time.time() span = end - self.__start if restart: self.__start = end return span
[ "def", "get_elapsed", "(", "self", ",", "restart", "=", "True", ")", ":", "end", "=", "time", ".", "time", "(", ")", "span", "=", "end", "-", "self", ".", "__start", "if", "restart", ":", "self", ".", "__start", "=", "end", "return", "span" ]
Calculate time span.
[ "Calculate", "time", "span", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/weight_sharing/ga_squad/util.py#L68-L76
train
Microsoft/nni
examples/trials/kaggle-tgs-salt/predict.py
do_tta_predict
def do_tta_predict(args, model, ckp_path, tta_num=4): ''' return 18000x128x128 np array ''' model.eval() preds = [] meta = None # i is tta index, 0: no change, 1: horizon flip, 2: vertical flip, 3: do both for flip_index in range(tta_num): print('flip_index:', flip_index) test_loader = get_test_loader(args.batch_size, index=flip_index, dev_mode=False, pad_mode=args.pad_mode) meta = test_loader.meta outputs = None with torch.no_grad(): for i, img in enumerate(test_loader): add_depth_channel(img, args.pad_mode) img = img.cuda() output, _ = model(img) output = torch.sigmoid(output) if outputs is None: outputs = output.squeeze() else: outputs = torch.cat([outputs, output.squeeze()], 0) print('{} / {}'.format(args.batch_size*(i+1), test_loader.num), end='\r') outputs = outputs.cpu().numpy() # flip back masks if flip_index == 1: outputs = np.flip(outputs, 2) elif flip_index == 2: outputs = np.flip(outputs, 1) elif flip_index == 3: outputs = np.flip(outputs, 2) outputs = np.flip(outputs, 1) #print(outputs.shape) preds.append(outputs) parent_dir = ckp_path+'_out' if not os.path.exists(parent_dir): os.makedirs(parent_dir) np_file = os.path.join(parent_dir, 'pred.npy') model_pred_result = np.mean(preds, 0) np.save(np_file, model_pred_result) return model_pred_result, meta
python
def do_tta_predict(args, model, ckp_path, tta_num=4): ''' return 18000x128x128 np array ''' model.eval() preds = [] meta = None # i is tta index, 0: no change, 1: horizon flip, 2: vertical flip, 3: do both for flip_index in range(tta_num): print('flip_index:', flip_index) test_loader = get_test_loader(args.batch_size, index=flip_index, dev_mode=False, pad_mode=args.pad_mode) meta = test_loader.meta outputs = None with torch.no_grad(): for i, img in enumerate(test_loader): add_depth_channel(img, args.pad_mode) img = img.cuda() output, _ = model(img) output = torch.sigmoid(output) if outputs is None: outputs = output.squeeze() else: outputs = torch.cat([outputs, output.squeeze()], 0) print('{} / {}'.format(args.batch_size*(i+1), test_loader.num), end='\r') outputs = outputs.cpu().numpy() # flip back masks if flip_index == 1: outputs = np.flip(outputs, 2) elif flip_index == 2: outputs = np.flip(outputs, 1) elif flip_index == 3: outputs = np.flip(outputs, 2) outputs = np.flip(outputs, 1) #print(outputs.shape) preds.append(outputs) parent_dir = ckp_path+'_out' if not os.path.exists(parent_dir): os.makedirs(parent_dir) np_file = os.path.join(parent_dir, 'pred.npy') model_pred_result = np.mean(preds, 0) np.save(np_file, model_pred_result) return model_pred_result, meta
[ "def", "do_tta_predict", "(", "args", ",", "model", ",", "ckp_path", ",", "tta_num", "=", "4", ")", ":", "model", ".", "eval", "(", ")", "preds", "=", "[", "]", "meta", "=", "None", "# i is tta index, 0: no change, 1: horizon flip, 2: vertical flip, 3: do both", "for", "flip_index", "in", "range", "(", "tta_num", ")", ":", "print", "(", "'flip_index:'", ",", "flip_index", ")", "test_loader", "=", "get_test_loader", "(", "args", ".", "batch_size", ",", "index", "=", "flip_index", ",", "dev_mode", "=", "False", ",", "pad_mode", "=", "args", ".", "pad_mode", ")", "meta", "=", "test_loader", ".", "meta", "outputs", "=", "None", "with", "torch", ".", "no_grad", "(", ")", ":", "for", "i", ",", "img", "in", "enumerate", "(", "test_loader", ")", ":", "add_depth_channel", "(", "img", ",", "args", ".", "pad_mode", ")", "img", "=", "img", ".", "cuda", "(", ")", "output", ",", "_", "=", "model", "(", "img", ")", "output", "=", "torch", ".", "sigmoid", "(", "output", ")", "if", "outputs", "is", "None", ":", "outputs", "=", "output", ".", "squeeze", "(", ")", "else", ":", "outputs", "=", "torch", ".", "cat", "(", "[", "outputs", ",", "output", ".", "squeeze", "(", ")", "]", ",", "0", ")", "print", "(", "'{} / {}'", ".", "format", "(", "args", ".", "batch_size", "*", "(", "i", "+", "1", ")", ",", "test_loader", ".", "num", ")", ",", "end", "=", "'\\r'", ")", "outputs", "=", "outputs", ".", "cpu", "(", ")", ".", "numpy", "(", ")", "# flip back masks", "if", "flip_index", "==", "1", ":", "outputs", "=", "np", ".", "flip", "(", "outputs", ",", "2", ")", "elif", "flip_index", "==", "2", ":", "outputs", "=", "np", ".", "flip", "(", "outputs", ",", "1", ")", "elif", "flip_index", "==", "3", ":", "outputs", "=", "np", ".", "flip", "(", "outputs", ",", "2", ")", "outputs", "=", "np", ".", "flip", "(", "outputs", ",", "1", ")", "#print(outputs.shape)", "preds", ".", "append", "(", "outputs", ")", "parent_dir", "=", "ckp_path", "+", "'_out'", "if", "not", "os", ".", "path", ".", 
"exists", "(", "parent_dir", ")", ":", "os", ".", "makedirs", "(", "parent_dir", ")", "np_file", "=", "os", ".", "path", ".", "join", "(", "parent_dir", ",", "'pred.npy'", ")", "model_pred_result", "=", "np", ".", "mean", "(", "preds", ",", "0", ")", "np", ".", "save", "(", "np_file", ",", "model_pred_result", ")", "return", "model_pred_result", ",", "meta" ]
return 18000x128x128 np array
[ "return", "18000x128x128", "np", "array" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/predict.py#L37-L83
train
Microsoft/nni
examples/trials/mnist-distributed-pytorch/dist_mnist.py
partition_dataset
def partition_dataset(): """ Partitioning MNIST """ dataset = datasets.MNIST( './data', train=True, download=True, transform=transforms.Compose([ transforms.ToTensor(), transforms.Normalize((0.1307, ), (0.3081, )) ])) size = dist.get_world_size() bsz = 128 / float(size) partition_sizes = [1.0 / size for _ in range(size)] partition = DataPartitioner(dataset, partition_sizes) partition = partition.use(dist.get_rank()) train_set = torch.utils.data.DataLoader( partition, batch_size=int(bsz), shuffle=True) return train_set, bsz
python
def partition_dataset(): """ Partitioning MNIST """ dataset = datasets.MNIST( './data', train=True, download=True, transform=transforms.Compose([ transforms.ToTensor(), transforms.Normalize((0.1307, ), (0.3081, )) ])) size = dist.get_world_size() bsz = 128 / float(size) partition_sizes = [1.0 / size for _ in range(size)] partition = DataPartitioner(dataset, partition_sizes) partition = partition.use(dist.get_rank()) train_set = torch.utils.data.DataLoader( partition, batch_size=int(bsz), shuffle=True) return train_set, bsz
[ "def", "partition_dataset", "(", ")", ":", "dataset", "=", "datasets", ".", "MNIST", "(", "'./data'", ",", "train", "=", "True", ",", "download", "=", "True", ",", "transform", "=", "transforms", ".", "Compose", "(", "[", "transforms", ".", "ToTensor", "(", ")", ",", "transforms", ".", "Normalize", "(", "(", "0.1307", ",", ")", ",", "(", "0.3081", ",", ")", ")", "]", ")", ")", "size", "=", "dist", ".", "get_world_size", "(", ")", "bsz", "=", "128", "/", "float", "(", "size", ")", "partition_sizes", "=", "[", "1.0", "/", "size", "for", "_", "in", "range", "(", "size", ")", "]", "partition", "=", "DataPartitioner", "(", "dataset", ",", "partition_sizes", ")", "partition", "=", "partition", ".", "use", "(", "dist", ".", "get_rank", "(", ")", ")", "train_set", "=", "torch", ".", "utils", ".", "data", ".", "DataLoader", "(", "partition", ",", "batch_size", "=", "int", "(", "bsz", ")", ",", "shuffle", "=", "True", ")", "return", "train_set", ",", "bsz" ]
Partitioning MNIST
[ "Partitioning", "MNIST" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/mnist-distributed-pytorch/dist_mnist.py#L93-L110
train
Microsoft/nni
examples/trials/mnist-distributed-pytorch/dist_mnist.py
average_gradients
def average_gradients(model): """ Gradient averaging. """ size = float(dist.get_world_size()) for param in model.parameters(): dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM, group=0) param.grad.data /= size
python
def average_gradients(model): """ Gradient averaging. """ size = float(dist.get_world_size()) for param in model.parameters(): dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM, group=0) param.grad.data /= size
[ "def", "average_gradients", "(", "model", ")", ":", "size", "=", "float", "(", "dist", ".", "get_world_size", "(", ")", ")", "for", "param", "in", "model", ".", "parameters", "(", ")", ":", "dist", ".", "all_reduce", "(", "param", ".", "grad", ".", "data", ",", "op", "=", "dist", ".", "reduce_op", ".", "SUM", ",", "group", "=", "0", ")", "param", ".", "grad", ".", "data", "/=", "size" ]
Gradient averaging.
[ "Gradient", "averaging", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/mnist-distributed-pytorch/dist_mnist.py#L113-L118
train
Microsoft/nni
examples/trials/mnist-distributed-pytorch/dist_mnist.py
run
def run(params): """ Distributed Synchronous SGD Example """ rank = dist.get_rank() torch.manual_seed(1234) train_set, bsz = partition_dataset() model = Net() model = model optimizer = optim.SGD(model.parameters(), lr=params['learning_rate'], momentum=params['momentum']) num_batches = ceil(len(train_set.dataset) / float(bsz)) total_loss = 0.0 for epoch in range(3): epoch_loss = 0.0 for data, target in train_set: data, target = Variable(data), Variable(target) optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) epoch_loss += loss.item() loss.backward() average_gradients(model) optimizer.step() #logger.debug('Rank: ', rank, ', epoch: ', epoch, ': ', epoch_loss / num_batches) if rank == 0: nni.report_intermediate_result(epoch_loss / num_batches) total_loss += (epoch_loss / num_batches) total_loss /= 3 logger.debug('Final loss: {}'.format(total_loss)) if rank == 0: nni.report_final_result(total_loss)
python
def run(params): """ Distributed Synchronous SGD Example """ rank = dist.get_rank() torch.manual_seed(1234) train_set, bsz = partition_dataset() model = Net() model = model optimizer = optim.SGD(model.parameters(), lr=params['learning_rate'], momentum=params['momentum']) num_batches = ceil(len(train_set.dataset) / float(bsz)) total_loss = 0.0 for epoch in range(3): epoch_loss = 0.0 for data, target in train_set: data, target = Variable(data), Variable(target) optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) epoch_loss += loss.item() loss.backward() average_gradients(model) optimizer.step() #logger.debug('Rank: ', rank, ', epoch: ', epoch, ': ', epoch_loss / num_batches) if rank == 0: nni.report_intermediate_result(epoch_loss / num_batches) total_loss += (epoch_loss / num_batches) total_loss /= 3 logger.debug('Final loss: {}'.format(total_loss)) if rank == 0: nni.report_final_result(total_loss)
[ "def", "run", "(", "params", ")", ":", "rank", "=", "dist", ".", "get_rank", "(", ")", "torch", ".", "manual_seed", "(", "1234", ")", "train_set", ",", "bsz", "=", "partition_dataset", "(", ")", "model", "=", "Net", "(", ")", "model", "=", "model", "optimizer", "=", "optim", ".", "SGD", "(", "model", ".", "parameters", "(", ")", ",", "lr", "=", "params", "[", "'learning_rate'", "]", ",", "momentum", "=", "params", "[", "'momentum'", "]", ")", "num_batches", "=", "ceil", "(", "len", "(", "train_set", ".", "dataset", ")", "/", "float", "(", "bsz", ")", ")", "total_loss", "=", "0.0", "for", "epoch", "in", "range", "(", "3", ")", ":", "epoch_loss", "=", "0.0", "for", "data", ",", "target", "in", "train_set", ":", "data", ",", "target", "=", "Variable", "(", "data", ")", ",", "Variable", "(", "target", ")", "optimizer", ".", "zero_grad", "(", ")", "output", "=", "model", "(", "data", ")", "loss", "=", "F", ".", "nll_loss", "(", "output", ",", "target", ")", "epoch_loss", "+=", "loss", ".", "item", "(", ")", "loss", ".", "backward", "(", ")", "average_gradients", "(", "model", ")", "optimizer", ".", "step", "(", ")", "#logger.debug('Rank: ', rank, ', epoch: ', epoch, ': ', epoch_loss / num_batches)", "if", "rank", "==", "0", ":", "nni", ".", "report_intermediate_result", "(", "epoch_loss", "/", "num_batches", ")", "total_loss", "+=", "(", "epoch_loss", "/", "num_batches", ")", "total_loss", "/=", "3", "logger", ".", "debug", "(", "'Final loss: {}'", ".", "format", "(", "total_loss", ")", ")", "if", "rank", "==", "0", ":", "nni", ".", "report_final_result", "(", "total_loss", ")" ]
Distributed Synchronous SGD Example
[ "Distributed", "Synchronous", "SGD", "Example" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/mnist-distributed-pytorch/dist_mnist.py#L121-L150
train
Microsoft/nni
examples/trials/ga_squad/graph.py
graph_loads
def graph_loads(graph_json): ''' Load graph ''' layers = [] for layer in graph_json['layers']: layer_info = Layer(layer['type'], layer['input'], layer['output'], layer['size']) layer_info.is_delete = layer['is_delete'] layers.append(layer_info) graph = Graph(graph_json['max_layer_num'], [], [], []) graph.layers = layers return graph
python
def graph_loads(graph_json): ''' Load graph ''' layers = [] for layer in graph_json['layers']: layer_info = Layer(layer['type'], layer['input'], layer['output'], layer['size']) layer_info.is_delete = layer['is_delete'] layers.append(layer_info) graph = Graph(graph_json['max_layer_num'], [], [], []) graph.layers = layers return graph
[ "def", "graph_loads", "(", "graph_json", ")", ":", "layers", "=", "[", "]", "for", "layer", "in", "graph_json", "[", "'layers'", "]", ":", "layer_info", "=", "Layer", "(", "layer", "[", "'type'", "]", ",", "layer", "[", "'input'", "]", ",", "layer", "[", "'output'", "]", ",", "layer", "[", "'size'", "]", ")", "layer_info", ".", "is_delete", "=", "layer", "[", "'is_delete'", "]", "layers", ".", "append", "(", "layer_info", ")", "graph", "=", "Graph", "(", "graph_json", "[", "'max_layer_num'", "]", ",", "[", "]", ",", "[", "]", ",", "[", "]", ")", "graph", ".", "layers", "=", "layers", "return", "graph" ]
Load graph
[ "Load", "graph" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L103-L114
train
Microsoft/nni
examples/trials/ga_squad/graph.py
Layer.set_size
def set_size(self, graph_id, size): ''' Set size. ''' if self.graph_type == LayerType.attention.value: if self.input[0] == graph_id: self.size = size if self.graph_type == LayerType.rnn.value: self.size = size if self.graph_type == LayerType.self_attention.value: self.size = size if self.graph_type == LayerType.output.value: if self.size != size: return False return True
python
def set_size(self, graph_id, size): ''' Set size. ''' if self.graph_type == LayerType.attention.value: if self.input[0] == graph_id: self.size = size if self.graph_type == LayerType.rnn.value: self.size = size if self.graph_type == LayerType.self_attention.value: self.size = size if self.graph_type == LayerType.output.value: if self.size != size: return False return True
[ "def", "set_size", "(", "self", ",", "graph_id", ",", "size", ")", ":", "if", "self", ".", "graph_type", "==", "LayerType", ".", "attention", ".", "value", ":", "if", "self", ".", "input", "[", "0", "]", "==", "graph_id", ":", "self", ".", "size", "=", "size", "if", "self", ".", "graph_type", "==", "LayerType", ".", "rnn", ".", "value", ":", "self", ".", "size", "=", "size", "if", "self", ".", "graph_type", "==", "LayerType", ".", "self_attention", ".", "value", ":", "self", ".", "size", "=", "size", "if", "self", ".", "graph_type", "==", "LayerType", ".", "output", ".", "value", ":", "if", "self", ".", "size", "!=", "size", ":", "return", "False", "return", "True" ]
Set size.
[ "Set", "size", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L69-L83
train
Microsoft/nni
examples/trials/ga_squad/graph.py
Layer.clear_size
def clear_size(self): ''' Clear size ''' if self.graph_type == LayerType.attention.value or \ LayerType.rnn.value or LayerType.self_attention.value: self.size = None
python
def clear_size(self): ''' Clear size ''' if self.graph_type == LayerType.attention.value or \ LayerType.rnn.value or LayerType.self_attention.value: self.size = None
[ "def", "clear_size", "(", "self", ")", ":", "if", "self", ".", "graph_type", "==", "LayerType", ".", "attention", ".", "value", "or", "LayerType", ".", "rnn", ".", "value", "or", "LayerType", ".", "self_attention", ".", "value", ":", "self", ".", "size", "=", "None" ]
Clear size
[ "Clear", "size" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L85-L91
train
Microsoft/nni
examples/trials/ga_squad/graph.py
Graph.is_topology
def is_topology(self, layers=None): ''' valid the topology ''' if layers is None: layers = self.layers layers_nodle = [] result = [] for i, layer in enumerate(layers): if layer.is_delete is False: layers_nodle.append(i) while True: flag_break = True layers_toremove = [] for layer1 in layers_nodle: flag_arrive = True for layer2 in layers[layer1].input: if layer2 in layers_nodle: flag_arrive = False if flag_arrive is True: for layer2 in layers[layer1].output: # Size is error if layers[layer2].set_size(layer1, layers[layer1].size) is False: return False layers_toremove.append(layer1) result.append(layer1) flag_break = False for layer in layers_toremove: layers_nodle.remove(layer) result.append('|') if flag_break: break # There is loop in graph || some layers can't to arrive if layers_nodle: return False return result
python
def is_topology(self, layers=None): ''' valid the topology ''' if layers is None: layers = self.layers layers_nodle = [] result = [] for i, layer in enumerate(layers): if layer.is_delete is False: layers_nodle.append(i) while True: flag_break = True layers_toremove = [] for layer1 in layers_nodle: flag_arrive = True for layer2 in layers[layer1].input: if layer2 in layers_nodle: flag_arrive = False if flag_arrive is True: for layer2 in layers[layer1].output: # Size is error if layers[layer2].set_size(layer1, layers[layer1].size) is False: return False layers_toremove.append(layer1) result.append(layer1) flag_break = False for layer in layers_toremove: layers_nodle.remove(layer) result.append('|') if flag_break: break # There is loop in graph || some layers can't to arrive if layers_nodle: return False return result
[ "def", "is_topology", "(", "self", ",", "layers", "=", "None", ")", ":", "if", "layers", "is", "None", ":", "layers", "=", "self", ".", "layers", "layers_nodle", "=", "[", "]", "result", "=", "[", "]", "for", "i", ",", "layer", "in", "enumerate", "(", "layers", ")", ":", "if", "layer", ".", "is_delete", "is", "False", ":", "layers_nodle", ".", "append", "(", "i", ")", "while", "True", ":", "flag_break", "=", "True", "layers_toremove", "=", "[", "]", "for", "layer1", "in", "layers_nodle", ":", "flag_arrive", "=", "True", "for", "layer2", "in", "layers", "[", "layer1", "]", ".", "input", ":", "if", "layer2", "in", "layers_nodle", ":", "flag_arrive", "=", "False", "if", "flag_arrive", "is", "True", ":", "for", "layer2", "in", "layers", "[", "layer1", "]", ".", "output", ":", "# Size is error", "if", "layers", "[", "layer2", "]", ".", "set_size", "(", "layer1", ",", "layers", "[", "layer1", "]", ".", "size", ")", "is", "False", ":", "return", "False", "layers_toremove", ".", "append", "(", "layer1", ")", "result", ".", "append", "(", "layer1", ")", "flag_break", "=", "False", "for", "layer", "in", "layers_toremove", ":", "layers_nodle", ".", "remove", "(", "layer", ")", "result", ".", "append", "(", "'|'", ")", "if", "flag_break", ":", "break", "# There is loop in graph || some layers can't to arrive", "if", "layers_nodle", ":", "return", "False", "return", "result" ]
valid the topology
[ "valid", "the", "topology" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L133-L168
train
Microsoft/nni
examples/trials/ga_squad/graph.py
Graph.is_legal
def is_legal(self, layers=None): ''' Judge whether is legal for layers ''' if layers is None: layers = self.layers for layer in layers: if layer.is_delete is False: if len(layer.input) != layer.input_size: return False if len(layer.output) < layer.output_size: return False # layer_num <= max_layer_num if self.layer_num(layers) > self.max_layer_num: return False # There is loop in graph || some layers can't to arrive if self.is_topology(layers) is False: return False return True
python
def is_legal(self, layers=None): ''' Judge whether is legal for layers ''' if layers is None: layers = self.layers for layer in layers: if layer.is_delete is False: if len(layer.input) != layer.input_size: return False if len(layer.output) < layer.output_size: return False # layer_num <= max_layer_num if self.layer_num(layers) > self.max_layer_num: return False # There is loop in graph || some layers can't to arrive if self.is_topology(layers) is False: return False return True
[ "def", "is_legal", "(", "self", ",", "layers", "=", "None", ")", ":", "if", "layers", "is", "None", ":", "layers", "=", "self", ".", "layers", "for", "layer", "in", "layers", ":", "if", "layer", ".", "is_delete", "is", "False", ":", "if", "len", "(", "layer", ".", "input", ")", "!=", "layer", ".", "input_size", ":", "return", "False", "if", "len", "(", "layer", ".", "output", ")", "<", "layer", ".", "output_size", ":", "return", "False", "# layer_num <= max_layer_num", "if", "self", ".", "layer_num", "(", "layers", ")", ">", "self", ".", "max_layer_num", ":", "return", "False", "# There is loop in graph || some layers can't to arrive", "if", "self", ".", "is_topology", "(", "layers", ")", "is", "False", ":", "return", "False", "return", "True" ]
Judge whether is legal for layers
[ "Judge", "whether", "is", "legal", "for", "layers" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L183-L205
train
Microsoft/nni
examples/trials/ga_squad/graph.py
Graph.mutation
def mutation(self, only_add=False): ''' Mutation for a graph ''' types = [] if self.layer_num() < self.max_layer_num: types.append(0) types.append(1) if self.layer_num() > 5 and only_add is False: types.append(2) types.append(3) # 0 : add a layer , delete a edge # 1 : add a layer , change a edge # 2 : delete a layer, delete a edge # 3 : delete a layer, change a edge graph_type = random.choice(types) layer_type = random.choice([LayerType.attention.value,\ LayerType.self_attention.value, LayerType.rnn.value]) layers = copy.deepcopy(self.layers) cnt_try = 0 while True: layers_in = [] layers_out = [] layers_del = [] for i, layer in enumerate(layers): if layer.is_delete is False: if layer.graph_type != LayerType.output.value: layers_in.append(i) if layer.graph_type != LayerType.input.value: layers_out.append(i) if layer.graph_type != LayerType.output.value\ and layer.graph_type != LayerType.input.value: layers_del.append(i) if graph_type <= 1: new_id = len(layers) out = random.choice(layers_out) inputs = [] output = [out] pos = random.randint(0, len(layers[out].input) - 1) last_in = layers[out].input[pos] layers[out].input[pos] = new_id if graph_type == 0: layers[last_in].output.remove(out) if graph_type == 1: layers[last_in].output.remove(out) layers[last_in].output.append(new_id) inputs = [last_in] lay = Layer(graph_type=layer_type, inputs=inputs, output=output) while len(inputs) < lay.input_size: layer1 = random.choice(layers_in) inputs.append(layer1) layers[layer1].output.append(new_id) lay.input = inputs layers.append(lay) else: layer1 = random.choice(layers_del) for layer2 in layers[layer1].output: layers[layer2].input.remove(layer1) if graph_type == 2: random_in = random.choice(layers_in) else: random_in = random.choice(layers[layer1].input) layers[layer2].input.append(random_in) layers[random_in].output.append(layer2) for layer2 in layers[layer1].input: layers[layer2].output.remove(layer1) layers[layer1].is_delete = True if self.is_legal(layers): self.layers = 
layers break else: layers = copy.deepcopy(self.layers) cnt_try += 1
python
def mutation(self, only_add=False): ''' Mutation for a graph ''' types = [] if self.layer_num() < self.max_layer_num: types.append(0) types.append(1) if self.layer_num() > 5 and only_add is False: types.append(2) types.append(3) # 0 : add a layer , delete a edge # 1 : add a layer , change a edge # 2 : delete a layer, delete a edge # 3 : delete a layer, change a edge graph_type = random.choice(types) layer_type = random.choice([LayerType.attention.value,\ LayerType.self_attention.value, LayerType.rnn.value]) layers = copy.deepcopy(self.layers) cnt_try = 0 while True: layers_in = [] layers_out = [] layers_del = [] for i, layer in enumerate(layers): if layer.is_delete is False: if layer.graph_type != LayerType.output.value: layers_in.append(i) if layer.graph_type != LayerType.input.value: layers_out.append(i) if layer.graph_type != LayerType.output.value\ and layer.graph_type != LayerType.input.value: layers_del.append(i) if graph_type <= 1: new_id = len(layers) out = random.choice(layers_out) inputs = [] output = [out] pos = random.randint(0, len(layers[out].input) - 1) last_in = layers[out].input[pos] layers[out].input[pos] = new_id if graph_type == 0: layers[last_in].output.remove(out) if graph_type == 1: layers[last_in].output.remove(out) layers[last_in].output.append(new_id) inputs = [last_in] lay = Layer(graph_type=layer_type, inputs=inputs, output=output) while len(inputs) < lay.input_size: layer1 = random.choice(layers_in) inputs.append(layer1) layers[layer1].output.append(new_id) lay.input = inputs layers.append(lay) else: layer1 = random.choice(layers_del) for layer2 in layers[layer1].output: layers[layer2].input.remove(layer1) if graph_type == 2: random_in = random.choice(layers_in) else: random_in = random.choice(layers[layer1].input) layers[layer2].input.append(random_in) layers[random_in].output.append(layer2) for layer2 in layers[layer1].input: layers[layer2].output.remove(layer1) layers[layer1].is_delete = True if self.is_legal(layers): self.layers = 
layers break else: layers = copy.deepcopy(self.layers) cnt_try += 1
[ "def", "mutation", "(", "self", ",", "only_add", "=", "False", ")", ":", "types", "=", "[", "]", "if", "self", ".", "layer_num", "(", ")", "<", "self", ".", "max_layer_num", ":", "types", ".", "append", "(", "0", ")", "types", ".", "append", "(", "1", ")", "if", "self", ".", "layer_num", "(", ")", ">", "5", "and", "only_add", "is", "False", ":", "types", ".", "append", "(", "2", ")", "types", ".", "append", "(", "3", ")", "# 0 : add a layer , delete a edge", "# 1 : add a layer , change a edge", "# 2 : delete a layer, delete a edge", "# 3 : delete a layer, change a edge", "graph_type", "=", "random", ".", "choice", "(", "types", ")", "layer_type", "=", "random", ".", "choice", "(", "[", "LayerType", ".", "attention", ".", "value", ",", "LayerType", ".", "self_attention", ".", "value", ",", "LayerType", ".", "rnn", ".", "value", "]", ")", "layers", "=", "copy", ".", "deepcopy", "(", "self", ".", "layers", ")", "cnt_try", "=", "0", "while", "True", ":", "layers_in", "=", "[", "]", "layers_out", "=", "[", "]", "layers_del", "=", "[", "]", "for", "i", ",", "layer", "in", "enumerate", "(", "layers", ")", ":", "if", "layer", ".", "is_delete", "is", "False", ":", "if", "layer", ".", "graph_type", "!=", "LayerType", ".", "output", ".", "value", ":", "layers_in", ".", "append", "(", "i", ")", "if", "layer", ".", "graph_type", "!=", "LayerType", ".", "input", ".", "value", ":", "layers_out", ".", "append", "(", "i", ")", "if", "layer", ".", "graph_type", "!=", "LayerType", ".", "output", ".", "value", "and", "layer", ".", "graph_type", "!=", "LayerType", ".", "input", ".", "value", ":", "layers_del", ".", "append", "(", "i", ")", "if", "graph_type", "<=", "1", ":", "new_id", "=", "len", "(", "layers", ")", "out", "=", "random", ".", "choice", "(", "layers_out", ")", "inputs", "=", "[", "]", "output", "=", "[", "out", "]", "pos", "=", "random", ".", "randint", "(", "0", ",", "len", "(", "layers", "[", "out", "]", ".", "input", ")", "-", "1", ")", "last_in", "=", "layers", 
"[", "out", "]", ".", "input", "[", "pos", "]", "layers", "[", "out", "]", ".", "input", "[", "pos", "]", "=", "new_id", "if", "graph_type", "==", "0", ":", "layers", "[", "last_in", "]", ".", "output", ".", "remove", "(", "out", ")", "if", "graph_type", "==", "1", ":", "layers", "[", "last_in", "]", ".", "output", ".", "remove", "(", "out", ")", "layers", "[", "last_in", "]", ".", "output", ".", "append", "(", "new_id", ")", "inputs", "=", "[", "last_in", "]", "lay", "=", "Layer", "(", "graph_type", "=", "layer_type", ",", "inputs", "=", "inputs", ",", "output", "=", "output", ")", "while", "len", "(", "inputs", ")", "<", "lay", ".", "input_size", ":", "layer1", "=", "random", ".", "choice", "(", "layers_in", ")", "inputs", ".", "append", "(", "layer1", ")", "layers", "[", "layer1", "]", ".", "output", ".", "append", "(", "new_id", ")", "lay", ".", "input", "=", "inputs", "layers", ".", "append", "(", "lay", ")", "else", ":", "layer1", "=", "random", ".", "choice", "(", "layers_del", ")", "for", "layer2", "in", "layers", "[", "layer1", "]", ".", "output", ":", "layers", "[", "layer2", "]", ".", "input", ".", "remove", "(", "layer1", ")", "if", "graph_type", "==", "2", ":", "random_in", "=", "random", ".", "choice", "(", "layers_in", ")", "else", ":", "random_in", "=", "random", ".", "choice", "(", "layers", "[", "layer1", "]", ".", "input", ")", "layers", "[", "layer2", "]", ".", "input", ".", "append", "(", "random_in", ")", "layers", "[", "random_in", "]", ".", "output", ".", "append", "(", "layer2", ")", "for", "layer2", "in", "layers", "[", "layer1", "]", ".", "input", ":", "layers", "[", "layer2", "]", ".", "output", ".", "remove", "(", "layer1", ")", "layers", "[", "layer1", "]", ".", "is_delete", "=", "True", "if", "self", ".", "is_legal", "(", "layers", ")", ":", "self", ".", "layers", "=", "layers", "break", "else", ":", "layers", "=", "copy", ".", "deepcopy", "(", "self", ".", "layers", ")", "cnt_try", "+=", "1" ]
Mutation for a graph
[ "Mutation", "for", "a", "graph" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/ga_squad/graph.py#L207-L280
train
Microsoft/nni
src/sdk/pynni/nni/smac_tuner/smac_tuner.py
SMACTuner._main_cli
def _main_cli(self): """Main function of SMAC for CLI interface Returns ------- instance optimizer """ self.logger.info("SMAC call: %s" % (" ".join(sys.argv))) cmd_reader = CMDReader() args, _ = cmd_reader.read_cmd() root_logger = logging.getLogger() root_logger.setLevel(args.verbose_level) logger_handler = logging.StreamHandler( stream=sys.stdout) if root_logger.level >= logging.INFO: formatter = logging.Formatter( "%(levelname)s:\t%(message)s") else: formatter = logging.Formatter( "%(asctime)s:%(levelname)s:%(name)s:%(message)s", "%Y-%m-%d %H:%M:%S") logger_handler.setFormatter(formatter) root_logger.addHandler(logger_handler) # remove default handler root_logger.removeHandler(root_logger.handlers[0]) # Create defaults rh = None initial_configs = None stats = None incumbent = None # Create scenario-object scen = Scenario(args.scenario_file, []) if args.mode == "SMAC": optimizer = SMAC( scenario=scen, rng=np.random.RandomState(args.seed), runhistory=rh, initial_configurations=initial_configs, stats=stats, restore_incumbent=incumbent, run_id=args.seed) elif args.mode == "ROAR": optimizer = ROAR( scenario=scen, rng=np.random.RandomState(args.seed), runhistory=rh, initial_configurations=initial_configs, run_id=args.seed) elif args.mode == "EPILS": optimizer = EPILS( scenario=scen, rng=np.random.RandomState(args.seed), runhistory=rh, initial_configurations=initial_configs, run_id=args.seed) else: optimizer = None return optimizer
python
def _main_cli(self): """Main function of SMAC for CLI interface Returns ------- instance optimizer """ self.logger.info("SMAC call: %s" % (" ".join(sys.argv))) cmd_reader = CMDReader() args, _ = cmd_reader.read_cmd() root_logger = logging.getLogger() root_logger.setLevel(args.verbose_level) logger_handler = logging.StreamHandler( stream=sys.stdout) if root_logger.level >= logging.INFO: formatter = logging.Formatter( "%(levelname)s:\t%(message)s") else: formatter = logging.Formatter( "%(asctime)s:%(levelname)s:%(name)s:%(message)s", "%Y-%m-%d %H:%M:%S") logger_handler.setFormatter(formatter) root_logger.addHandler(logger_handler) # remove default handler root_logger.removeHandler(root_logger.handlers[0]) # Create defaults rh = None initial_configs = None stats = None incumbent = None # Create scenario-object scen = Scenario(args.scenario_file, []) if args.mode == "SMAC": optimizer = SMAC( scenario=scen, rng=np.random.RandomState(args.seed), runhistory=rh, initial_configurations=initial_configs, stats=stats, restore_incumbent=incumbent, run_id=args.seed) elif args.mode == "ROAR": optimizer = ROAR( scenario=scen, rng=np.random.RandomState(args.seed), runhistory=rh, initial_configurations=initial_configs, run_id=args.seed) elif args.mode == "EPILS": optimizer = EPILS( scenario=scen, rng=np.random.RandomState(args.seed), runhistory=rh, initial_configurations=initial_configs, run_id=args.seed) else: optimizer = None return optimizer
[ "def", "_main_cli", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "\"SMAC call: %s\"", "%", "(", "\" \"", ".", "join", "(", "sys", ".", "argv", ")", ")", ")", "cmd_reader", "=", "CMDReader", "(", ")", "args", ",", "_", "=", "cmd_reader", ".", "read_cmd", "(", ")", "root_logger", "=", "logging", ".", "getLogger", "(", ")", "root_logger", ".", "setLevel", "(", "args", ".", "verbose_level", ")", "logger_handler", "=", "logging", ".", "StreamHandler", "(", "stream", "=", "sys", ".", "stdout", ")", "if", "root_logger", ".", "level", ">=", "logging", ".", "INFO", ":", "formatter", "=", "logging", ".", "Formatter", "(", "\"%(levelname)s:\\t%(message)s\"", ")", "else", ":", "formatter", "=", "logging", ".", "Formatter", "(", "\"%(asctime)s:%(levelname)s:%(name)s:%(message)s\"", ",", "\"%Y-%m-%d %H:%M:%S\"", ")", "logger_handler", ".", "setFormatter", "(", "formatter", ")", "root_logger", ".", "addHandler", "(", "logger_handler", ")", "# remove default handler", "root_logger", ".", "removeHandler", "(", "root_logger", ".", "handlers", "[", "0", "]", ")", "# Create defaults", "rh", "=", "None", "initial_configs", "=", "None", "stats", "=", "None", "incumbent", "=", "None", "# Create scenario-object", "scen", "=", "Scenario", "(", "args", ".", "scenario_file", ",", "[", "]", ")", "if", "args", ".", "mode", "==", "\"SMAC\"", ":", "optimizer", "=", "SMAC", "(", "scenario", "=", "scen", ",", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "args", ".", "seed", ")", ",", "runhistory", "=", "rh", ",", "initial_configurations", "=", "initial_configs", ",", "stats", "=", "stats", ",", "restore_incumbent", "=", "incumbent", ",", "run_id", "=", "args", ".", "seed", ")", "elif", "args", ".", "mode", "==", "\"ROAR\"", ":", "optimizer", "=", "ROAR", "(", "scenario", "=", "scen", ",", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "args", ".", "seed", ")", ",", "runhistory", "=", "rh", ",", "initial_configurations", "=", "initial_configs", ",", 
"run_id", "=", "args", ".", "seed", ")", "elif", "args", ".", "mode", "==", "\"EPILS\"", ":", "optimizer", "=", "EPILS", "(", "scenario", "=", "scen", ",", "rng", "=", "np", ".", "random", ".", "RandomState", "(", "args", ".", "seed", ")", ",", "runhistory", "=", "rh", ",", "initial_configurations", "=", "initial_configs", ",", "run_id", "=", "args", ".", "seed", ")", "else", ":", "optimizer", "=", "None", "return", "optimizer" ]
Main function of SMAC for CLI interface Returns ------- instance optimizer
[ "Main", "function", "of", "SMAC", "for", "CLI", "interface", "Returns", "-------", "instance", "optimizer" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L66-L130
train
Microsoft/nni
src/sdk/pynni/nni/smac_tuner/smac_tuner.py
SMACTuner.update_search_space
def update_search_space(self, search_space): """TODO: this is urgly, we put all the initialization work in this method, because initialization relies on search space, also because update_search_space is called at the beginning. NOTE: updating search space is not supported. Parameters ---------- search_space: search space """ if not self.update_ss_done: self.categorical_dict = generate_scenario(search_space) if self.categorical_dict is None: raise RuntimeError('categorical dict is not correctly returned after parsing search space.') self.optimizer = self._main_cli() self.smbo_solver = self.optimizer.solver self.loguniform_key = {key for key in search_space.keys() if search_space[key]['_type'] == 'loguniform'} self.update_ss_done = True else: self.logger.warning('update search space is not supported.')
python
def update_search_space(self, search_space): """TODO: this is urgly, we put all the initialization work in this method, because initialization relies on search space, also because update_search_space is called at the beginning. NOTE: updating search space is not supported. Parameters ---------- search_space: search space """ if not self.update_ss_done: self.categorical_dict = generate_scenario(search_space) if self.categorical_dict is None: raise RuntimeError('categorical dict is not correctly returned after parsing search space.') self.optimizer = self._main_cli() self.smbo_solver = self.optimizer.solver self.loguniform_key = {key for key in search_space.keys() if search_space[key]['_type'] == 'loguniform'} self.update_ss_done = True else: self.logger.warning('update search space is not supported.')
[ "def", "update_search_space", "(", "self", ",", "search_space", ")", ":", "if", "not", "self", ".", "update_ss_done", ":", "self", ".", "categorical_dict", "=", "generate_scenario", "(", "search_space", ")", "if", "self", ".", "categorical_dict", "is", "None", ":", "raise", "RuntimeError", "(", "'categorical dict is not correctly returned after parsing search space.'", ")", "self", ".", "optimizer", "=", "self", ".", "_main_cli", "(", ")", "self", ".", "smbo_solver", "=", "self", ".", "optimizer", ".", "solver", "self", ".", "loguniform_key", "=", "{", "key", "for", "key", "in", "search_space", ".", "keys", "(", ")", "if", "search_space", "[", "key", "]", "[", "'_type'", "]", "==", "'loguniform'", "}", "self", ".", "update_ss_done", "=", "True", "else", ":", "self", ".", "logger", ".", "warning", "(", "'update search space is not supported.'", ")" ]
TODO: this is urgly, we put all the initialization work in this method, because initialization relies on search space, also because update_search_space is called at the beginning. NOTE: updating search space is not supported. Parameters ---------- search_space: search space
[ "TODO", ":", "this", "is", "urgly", "we", "put", "all", "the", "initialization", "work", "in", "this", "method", "because", "initialization", "relies", "on", "search", "space", "also", "because", "update_search_space", "is", "called", "at", "the", "beginning", ".", "NOTE", ":", "updating", "search", "space", "is", "not", "supported", "." ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L132-L151
train
Microsoft/nni
src/sdk/pynni/nni/smac_tuner/smac_tuner.py
SMACTuner.receive_trial_result
def receive_trial_result(self, parameter_id, parameters, value): """receive_trial_result Parameters ---------- parameter_id: int parameter id parameters: parameters value: value Raises ------ RuntimeError Received parameter id not in total_data """ reward = extract_scalar_reward(value) if self.optimize_mode is OptimizeMode.Maximize: reward = -reward if parameter_id not in self.total_data: raise RuntimeError('Received parameter_id not in total_data.') if self.first_one: self.smbo_solver.nni_smac_receive_first_run(self.total_data[parameter_id], reward) self.first_one = False else: self.smbo_solver.nni_smac_receive_runs(self.total_data[parameter_id], reward)
python
def receive_trial_result(self, parameter_id, parameters, value): """receive_trial_result Parameters ---------- parameter_id: int parameter id parameters: parameters value: value Raises ------ RuntimeError Received parameter id not in total_data """ reward = extract_scalar_reward(value) if self.optimize_mode is OptimizeMode.Maximize: reward = -reward if parameter_id not in self.total_data: raise RuntimeError('Received parameter_id not in total_data.') if self.first_one: self.smbo_solver.nni_smac_receive_first_run(self.total_data[parameter_id], reward) self.first_one = False else: self.smbo_solver.nni_smac_receive_runs(self.total_data[parameter_id], reward)
[ "def", "receive_trial_result", "(", "self", ",", "parameter_id", ",", "parameters", ",", "value", ")", ":", "reward", "=", "extract_scalar_reward", "(", "value", ")", "if", "self", ".", "optimize_mode", "is", "OptimizeMode", ".", "Maximize", ":", "reward", "=", "-", "reward", "if", "parameter_id", "not", "in", "self", ".", "total_data", ":", "raise", "RuntimeError", "(", "'Received parameter_id not in total_data.'", ")", "if", "self", ".", "first_one", ":", "self", ".", "smbo_solver", ".", "nni_smac_receive_first_run", "(", "self", ".", "total_data", "[", "parameter_id", "]", ",", "reward", ")", "self", ".", "first_one", "=", "False", "else", ":", "self", ".", "smbo_solver", ".", "nni_smac_receive_runs", "(", "self", ".", "total_data", "[", "parameter_id", "]", ",", "reward", ")" ]
receive_trial_result Parameters ---------- parameter_id: int parameter id parameters: parameters value: value Raises ------ RuntimeError Received parameter id not in total_data
[ "receive_trial_result", "Parameters", "----------", "parameter_id", ":", "int", "parameter", "id", "parameters", ":", "parameters", "value", ":", "value", "Raises", "------", "RuntimeError", "Received", "parameter", "id", "not", "in", "total_data" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L153-L180
train
Microsoft/nni
src/sdk/pynni/nni/smac_tuner/smac_tuner.py
SMACTuner.convert_loguniform_categorical
def convert_loguniform_categorical(self, challenger_dict): """Convert the values of type `loguniform` back to their initial range Also, we convert categorical: categorical values in search space are changed to list of numbers before, those original values will be changed back in this function Parameters ---------- challenger_dict: dict challenger dict Returns ------- dict dict which stores copy of challengers """ converted_dict = {} for key, value in challenger_dict.items(): # convert to loguniform if key in self.loguniform_key: converted_dict[key] = np.exp(challenger_dict[key]) # convert categorical back to original value elif key in self.categorical_dict: idx = challenger_dict[key] converted_dict[key] = self.categorical_dict[key][idx] else: converted_dict[key] = value return converted_dict
python
def convert_loguniform_categorical(self, challenger_dict): """Convert the values of type `loguniform` back to their initial range Also, we convert categorical: categorical values in search space are changed to list of numbers before, those original values will be changed back in this function Parameters ---------- challenger_dict: dict challenger dict Returns ------- dict dict which stores copy of challengers """ converted_dict = {} for key, value in challenger_dict.items(): # convert to loguniform if key in self.loguniform_key: converted_dict[key] = np.exp(challenger_dict[key]) # convert categorical back to original value elif key in self.categorical_dict: idx = challenger_dict[key] converted_dict[key] = self.categorical_dict[key][idx] else: converted_dict[key] = value return converted_dict
[ "def", "convert_loguniform_categorical", "(", "self", ",", "challenger_dict", ")", ":", "converted_dict", "=", "{", "}", "for", "key", ",", "value", "in", "challenger_dict", ".", "items", "(", ")", ":", "# convert to loguniform", "if", "key", "in", "self", ".", "loguniform_key", ":", "converted_dict", "[", "key", "]", "=", "np", ".", "exp", "(", "challenger_dict", "[", "key", "]", ")", "# convert categorical back to original value", "elif", "key", "in", "self", ".", "categorical_dict", ":", "idx", "=", "challenger_dict", "[", "key", "]", "converted_dict", "[", "key", "]", "=", "self", ".", "categorical_dict", "[", "key", "]", "[", "idx", "]", "else", ":", "converted_dict", "[", "key", "]", "=", "value", "return", "converted_dict" ]
Convert the values of type `loguniform` back to their initial range Also, we convert categorical: categorical values in search space are changed to list of numbers before, those original values will be changed back in this function Parameters ---------- challenger_dict: dict challenger dict Returns ------- dict dict which stores copy of challengers
[ "Convert", "the", "values", "of", "type", "loguniform", "back", "to", "their", "initial", "range", "Also", "we", "convert", "categorical", ":", "categorical", "values", "in", "search", "space", "are", "changed", "to", "list", "of", "numbers", "before", "those", "original", "values", "will", "be", "changed", "back", "in", "this", "function", "Parameters", "----------", "challenger_dict", ":", "dict", "challenger", "dict" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L182-L209
train
Microsoft/nni
src/sdk/pynni/nni/smac_tuner/smac_tuner.py
SMACTuner.generate_parameters
def generate_parameters(self, parameter_id): """generate one instance of hyperparameters Parameters ---------- parameter_id: int parameter id Returns ------- list new generated parameters """ if self.first_one: init_challenger = self.smbo_solver.nni_smac_start() self.total_data[parameter_id] = init_challenger return self.convert_loguniform_categorical(init_challenger.get_dictionary()) else: challengers = self.smbo_solver.nni_smac_request_challengers() for challenger in challengers: self.total_data[parameter_id] = challenger return self.convert_loguniform_categorical(challenger.get_dictionary())
python
def generate_parameters(self, parameter_id): """generate one instance of hyperparameters Parameters ---------- parameter_id: int parameter id Returns ------- list new generated parameters """ if self.first_one: init_challenger = self.smbo_solver.nni_smac_start() self.total_data[parameter_id] = init_challenger return self.convert_loguniform_categorical(init_challenger.get_dictionary()) else: challengers = self.smbo_solver.nni_smac_request_challengers() for challenger in challengers: self.total_data[parameter_id] = challenger return self.convert_loguniform_categorical(challenger.get_dictionary())
[ "def", "generate_parameters", "(", "self", ",", "parameter_id", ")", ":", "if", "self", ".", "first_one", ":", "init_challenger", "=", "self", ".", "smbo_solver", ".", "nni_smac_start", "(", ")", "self", ".", "total_data", "[", "parameter_id", "]", "=", "init_challenger", "return", "self", ".", "convert_loguniform_categorical", "(", "init_challenger", ".", "get_dictionary", "(", ")", ")", "else", ":", "challengers", "=", "self", ".", "smbo_solver", ".", "nni_smac_request_challengers", "(", ")", "for", "challenger", "in", "challengers", ":", "self", ".", "total_data", "[", "parameter_id", "]", "=", "challenger", "return", "self", ".", "convert_loguniform_categorical", "(", "challenger", ".", "get_dictionary", "(", ")", ")" ]
generate one instance of hyperparameters Parameters ---------- parameter_id: int parameter id Returns ------- list new generated parameters
[ "generate", "one", "instance", "of", "hyperparameters", "Parameters", "----------", "parameter_id", ":", "int", "parameter", "id", "Returns", "-------", "list", "new", "generated", "parameters" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L211-L232
train
Microsoft/nni
src/sdk/pynni/nni/smac_tuner/smac_tuner.py
SMACTuner.generate_multiple_parameters
def generate_multiple_parameters(self, parameter_id_list): """generate mutiple instances of hyperparameters Parameters ---------- parameter_id_list: list list of parameter id Returns ------- list list of new generated parameters """ if self.first_one: params = [] for one_id in parameter_id_list: init_challenger = self.smbo_solver.nni_smac_start() self.total_data[one_id] = init_challenger params.append(self.convert_loguniform_categorical(init_challenger.get_dictionary())) else: challengers = self.smbo_solver.nni_smac_request_challengers() cnt = 0 params = [] for challenger in challengers: if cnt >= len(parameter_id_list): break self.total_data[parameter_id_list[cnt]] = challenger params.append(self.convert_loguniform_categorical(challenger.get_dictionary())) cnt += 1 return params
python
def generate_multiple_parameters(self, parameter_id_list): """generate mutiple instances of hyperparameters Parameters ---------- parameter_id_list: list list of parameter id Returns ------- list list of new generated parameters """ if self.first_one: params = [] for one_id in parameter_id_list: init_challenger = self.smbo_solver.nni_smac_start() self.total_data[one_id] = init_challenger params.append(self.convert_loguniform_categorical(init_challenger.get_dictionary())) else: challengers = self.smbo_solver.nni_smac_request_challengers() cnt = 0 params = [] for challenger in challengers: if cnt >= len(parameter_id_list): break self.total_data[parameter_id_list[cnt]] = challenger params.append(self.convert_loguniform_categorical(challenger.get_dictionary())) cnt += 1 return params
[ "def", "generate_multiple_parameters", "(", "self", ",", "parameter_id_list", ")", ":", "if", "self", ".", "first_one", ":", "params", "=", "[", "]", "for", "one_id", "in", "parameter_id_list", ":", "init_challenger", "=", "self", ".", "smbo_solver", ".", "nni_smac_start", "(", ")", "self", ".", "total_data", "[", "one_id", "]", "=", "init_challenger", "params", ".", "append", "(", "self", ".", "convert_loguniform_categorical", "(", "init_challenger", ".", "get_dictionary", "(", ")", ")", ")", "else", ":", "challengers", "=", "self", ".", "smbo_solver", ".", "nni_smac_request_challengers", "(", ")", "cnt", "=", "0", "params", "=", "[", "]", "for", "challenger", "in", "challengers", ":", "if", "cnt", ">=", "len", "(", "parameter_id_list", ")", ":", "break", "self", ".", "total_data", "[", "parameter_id_list", "[", "cnt", "]", "]", "=", "challenger", "params", ".", "append", "(", "self", ".", "convert_loguniform_categorical", "(", "challenger", ".", "get_dictionary", "(", ")", ")", ")", "cnt", "+=", "1", "return", "params" ]
generate mutiple instances of hyperparameters Parameters ---------- parameter_id_list: list list of parameter id Returns ------- list list of new generated parameters
[ "generate", "mutiple", "instances", "of", "hyperparameters", "Parameters", "----------", "parameter_id_list", ":", "list", "list", "of", "parameter", "id", "Returns", "-------", "list", "list", "of", "new", "generated", "parameters" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/src/sdk/pynni/nni/smac_tuner/smac_tuner.py#L234-L263
train
Microsoft/nni
examples/trials/kaggle-tgs-salt/lovasz_losses.py
lovasz_grad
def lovasz_grad(gt_sorted): """ Computes gradient of the Lovasz extension w.r.t sorted errors See Alg. 1 in paper """ p = len(gt_sorted) gts = gt_sorted.sum() intersection = gts - gt_sorted.float().cumsum(0) union = gts + (1 - gt_sorted).float().cumsum(0) jaccard = 1. - intersection / union if p > 1: # cover 1-pixel case jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] return jaccard
python
def lovasz_grad(gt_sorted): """ Computes gradient of the Lovasz extension w.r.t sorted errors See Alg. 1 in paper """ p = len(gt_sorted) gts = gt_sorted.sum() intersection = gts - gt_sorted.float().cumsum(0) union = gts + (1 - gt_sorted).float().cumsum(0) jaccard = 1. - intersection / union if p > 1: # cover 1-pixel case jaccard[1:p] = jaccard[1:p] - jaccard[0:-1] return jaccard
[ "def", "lovasz_grad", "(", "gt_sorted", ")", ":", "p", "=", "len", "(", "gt_sorted", ")", "gts", "=", "gt_sorted", ".", "sum", "(", ")", "intersection", "=", "gts", "-", "gt_sorted", ".", "float", "(", ")", ".", "cumsum", "(", "0", ")", "union", "=", "gts", "+", "(", "1", "-", "gt_sorted", ")", ".", "float", "(", ")", ".", "cumsum", "(", "0", ")", "jaccard", "=", "1.", "-", "intersection", "/", "union", "if", "p", ">", "1", ":", "# cover 1-pixel case", "jaccard", "[", "1", ":", "p", "]", "=", "jaccard", "[", "1", ":", "p", "]", "-", "jaccard", "[", "0", ":", "-", "1", "]", "return", "jaccard" ]
Computes gradient of the Lovasz extension w.r.t sorted errors See Alg. 1 in paper
[ "Computes", "gradient", "of", "the", "Lovasz", "extension", "w", ".", "r", ".", "t", "sorted", "errors", "See", "Alg", ".", "1", "in", "paper" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L36-L48
train
Microsoft/nni
examples/trials/kaggle-tgs-salt/lovasz_losses.py
iou_binary
def iou_binary(preds, labels, EMPTY=1., ignore=None, per_image=True): """ IoU for foreground class binary: 1 foreground, 0 background """ if not per_image: preds, labels = (preds,), (labels,) ious = [] for pred, label in zip(preds, labels): intersection = ((label == 1) & (pred == 1)).sum() union = ((label == 1) | ((pred == 1) & (label != ignore))).sum() if not union: iou = EMPTY else: iou = float(intersection) / union ious.append(iou) iou = mean(ious) # mean accross images if per_image return 100 * iou
python
def iou_binary(preds, labels, EMPTY=1., ignore=None, per_image=True): """ IoU for foreground class binary: 1 foreground, 0 background """ if not per_image: preds, labels = (preds,), (labels,) ious = [] for pred, label in zip(preds, labels): intersection = ((label == 1) & (pred == 1)).sum() union = ((label == 1) | ((pred == 1) & (label != ignore))).sum() if not union: iou = EMPTY else: iou = float(intersection) / union ious.append(iou) iou = mean(ious) # mean accross images if per_image return 100 * iou
[ "def", "iou_binary", "(", "preds", ",", "labels", ",", "EMPTY", "=", "1.", ",", "ignore", "=", "None", ",", "per_image", "=", "True", ")", ":", "if", "not", "per_image", ":", "preds", ",", "labels", "=", "(", "preds", ",", ")", ",", "(", "labels", ",", ")", "ious", "=", "[", "]", "for", "pred", ",", "label", "in", "zip", "(", "preds", ",", "labels", ")", ":", "intersection", "=", "(", "(", "label", "==", "1", ")", "&", "(", "pred", "==", "1", ")", ")", ".", "sum", "(", ")", "union", "=", "(", "(", "label", "==", "1", ")", "|", "(", "(", "pred", "==", "1", ")", "&", "(", "label", "!=", "ignore", ")", ")", ")", ".", "sum", "(", ")", "if", "not", "union", ":", "iou", "=", "EMPTY", "else", ":", "iou", "=", "float", "(", "intersection", ")", "/", "union", "ious", ".", "append", "(", "iou", ")", "iou", "=", "mean", "(", "ious", ")", "# mean accross images if per_image", "return", "100", "*", "iou" ]
IoU for foreground class binary: 1 foreground, 0 background
[ "IoU", "for", "foreground", "class", "binary", ":", "1", "foreground", "0", "background" ]
c7cc8db32da8d2ec77a382a55089f4e17247ce41
https://github.com/Microsoft/nni/blob/c7cc8db32da8d2ec77a382a55089f4e17247ce41/examples/trials/kaggle-tgs-salt/lovasz_losses.py#L51-L68
train