repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
eight04/pyAPNG
|
apng/__init__.py
|
read_file
|
python
|
def read_file(file):
if hasattr(file, "read"):
return file.read()
if hasattr(file, "read_bytes"):
return file.read_bytes()
with open(file, "rb") as f:
return f.read()
|
Read ``file`` into ``bytes``.
:arg file type: path-like or file-like
:rtype: bytes
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L113-L124
| null |
#! python3
"""This is an APNG module, which can create apng file from pngs
Reference:
http://littlesvr.ca/apng/
http://wiki.mozilla.org/APNG_Specification
https://www.w3.org/TR/PNG/
"""
import struct
import binascii
import io
import zlib
from collections import namedtuple
__version__ = "0.3.3"
PNG_SIGN = b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"
# http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html#C.Summary-of-standard-chunks
CHUNK_BEFORE_IDAT = {
"cHRM", "gAMA", "iCCP", "sBIT", "sRGB", "bKGD", "hIST", "tRNS", "pHYs",
"sPLT", "tIME", "PLTE"
}
def parse_chunks(b):
    """Split raw PNG bytes into chunks.

    :arg bytes b: The raw bytes of the PNG file.
    :return: A generator yielding :class:`Chunk`.
    :rtype: Iterator[Chunk]
    """
    # The 8-byte PNG signature precedes the first chunk.
    pos = 8
    total = len(b)
    while pos < total:
        # Each chunk is: 4-byte big-endian data length, 4-byte type,
        # data, then a 4-byte CRC -- 12 bytes of overhead in total.
        (length,) = struct.unpack("!I", b[pos:pos + 4])
        name = b[pos + 4:pos + 8].decode("latin-1")
        chunk_end = pos + length + 12
        yield Chunk(name, b[pos:chunk_end])
        pos = chunk_end
def make_chunk(chunk_type, chunk_data):
    """Compose a raw PNG chunk from a type and its data.

    The 4-byte length prefix and the trailing CRC are calculated for you.

    :arg str chunk_type: PNG chunk type.
    :arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.
    :rtype: bytes
    """
    length = struct.pack("!I", len(chunk_data))
    body = chunk_type.encode("latin-1") + chunk_data
    # The CRC covers the type and data fields only, masked to unsigned 32-bit.
    crc = struct.pack("!I", binascii.crc32(body) & 0xffffffff)
    return length + body + crc
def make_text_chunk(
    type="tEXt", key="Comment", value="",
    compression_flag=0, compression_method=0, lang="", translated_key=""):
    """Build a text chunk carrying a key/value pair.

    See https://www.w3.org/TR/PNG/#11textinfo for text chunk information.

    Usage:

    .. code:: python

        from apng import APNG, make_text_chunk
        im = APNG.open("file.png")
        png, control = im.frames[0]
        png.chunks.append(make_text_chunk("tEXt", "Comment", "some text"))
        im.save("file.png")

    :arg str type: Text chunk type: "tEXt", "zTXt", or "iTXt":
        tEXt uses Latin-1 characters.
        zTXt uses Latin-1 characters, compressed with zlib.
        iTXt uses UTF-8 characters.
    :arg str key: The key string, 1-79 characters.
    :arg str value: The text value. It would be encoded into
        :class:`bytes` and compressed if needed.
    :arg int compression_flag: The compression flag for iTXt.
    :arg int compression_method: The compression method for zTXt and iTXt.
    :arg str lang: The language tag for iTXt.
    :arg str translated_key: The translated keyword for iTXt.
    :rtype: Chunk
    """
    # pylint: disable=redefined-builtin
    if type == "tEXt":
        # Null-separated key/value, both Latin-1.
        payload = key.encode("latin-1") + b"\0" + value.encode("latin-1")
    elif type == "zTXt":
        # "!xb" emits the null separator plus the compression method byte.
        head = key.encode("latin-1") + struct.pack("!xb", compression_method)
        payload = head + zlib.compress(value.encode("latin-1"))
    elif type == "iTXt":
        text = value.encode("utf-8")
        if compression_flag:
            text = zlib.compress(text)
        payload = b"".join([
            key.encode("latin-1"),
            # Null separator, compression flag, compression method.
            struct.pack("!xbb", compression_flag, compression_method),
            lang.encode("latin-1"), b"\0",
            translated_key.encode("utf-8"), b"\0",
            text,
        ])
    else:
        raise TypeError("unknown type {!r}".format(type))
    return Chunk(type, make_chunk(type, payload))
def read_file(file):
    """Return the full content of ``file`` as :class:`bytes`.

    :arg file type: path-like or file-like
    :rtype: bytes
    """
    # A file-like object wins over a path-like one: a stream may not be
    # re-openable, whereas a path can always be opened below.
    reader = getattr(file, "read", None)
    if reader is not None:
        return reader()
    # pathlib.Path exposes read_bytes but not read.
    if hasattr(file, "read_bytes"):
        return file.read_bytes()
    with open(file, "rb") as fp:
        return fp.read()
def write_file(file, b):
    """Write the bytes ``b`` out to ``file``.

    :arg file type: path-like or file-like object.
    :arg bytes b: The content.
    """
    # pathlib.Path is handled first: it has write_bytes but no write.
    if hasattr(file, "write_bytes"):
        file.write_bytes(b)
        return
    if hasattr(file, "write"):
        file.write(b)
        return
    with open(file, "wb") as fp:
        fp.write(b)
def open_file(file, mode):
    """Return a readable file object for ``file``.

    Anything with a ``read`` attribute is returned unchanged; an object
    with an ``open`` method (e.g. :class:`pathlib.Path`) is opened through
    it; otherwise the builtin :func:`open` is used.

    :arg file: file-like or path-like object.
    :arg str mode: ``mode`` argument for :func:`open`.
    """
    if hasattr(file, "read"):
        return file
    opener = getattr(file, "open", None)
    if opener is not None:
        return opener(mode)
    return open(file, mode)
def file_to_png(fp):
    """Re-encode an image file as PNG using Pillow.

    :arg file-like fp: The image file.
    :rtype: bytes
    """
    # Imported lazily so Pillow is only required when conversion happens.
    import PIL.Image  # pylint: disable=import-error
    with io.BytesIO() as buffer:
        image = PIL.Image.open(fp)
        image.save(buffer, "PNG", optimize=True)
        return buffer.getvalue()
class Chunk(namedtuple("Chunk", ["type", "data"])):
    """A named tuple describing one PNG chunk.

    :arg str type: The chunk type.
    :arg bytes data: The raw bytes of the chunk, including chunk length, type,
        data, and CRC.
    """
class PNG:
    """Represent a single (non-animated) PNG image."""
    def __init__(self):
        self.hdr = None
        self.end = None
        self.width = None
        self.height = None
        self.chunks = []
        """A list of :class:`Chunk`. After reading a PNG file, the bytes
        are parsed into multiple chunks. You can remove/add chunks into
        this array before calling :func:`to_bytes`."""
    def init(self):
        """Scan :attr:`chunks` and cache header/trailer information."""
        for name, raw in self.chunks:
            if name == "IHDR":
                self.hdr = raw
            elif name == "IEND":
                self.end = raw
        if self.hdr:
            # Width and height are the first two IHDR fields, located right
            # after the 8-byte length/type prefix of the raw chunk.
            self.width, self.height = struct.unpack("!II", self.hdr[8:16])
    @classmethod
    def open(cls, file):
        """Open a PNG file.

        :arg file: Input file.
        :type file: path-like or file-like
        :rtype: :class:`PNG`
        """
        raw = read_file(file)
        return cls.from_bytes(raw)
    @classmethod
    def open_any(cls, file):
        """Open an image file. If the image is not PNG format, it would convert
        the image into PNG with Pillow module. If the module is not
        installed, :class:`ImportError` would be raised.

        :arg file: Input file.
        :type file: path-like or file-like
        :rtype: :class:`PNG`
        """
        with open_file(file, "rb") as f:
            # Sniff the signature, then rewind so the whole stream is usable.
            header = f.read(8)
            f.seek(0)
            if header == PNG_SIGN:
                raw = f.read()
            else:
                raw = file_to_png(f)
        return cls.from_bytes(raw)
    @classmethod
    def from_bytes(cls, b):
        """Create :class:`PNG` from raw bytes.

        :arg bytes b: The raw bytes of the PNG file.
        :rtype: :class:`PNG`
        """
        png = cls()
        png.chunks = list(parse_chunks(b))
        png.init()
        return png
    @classmethod
    def from_chunks(cls, chunks):
        """Construct PNG from raw chunks.

        :arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
            :func:`chunks`.
        :type chunks: list[tuple(str, bytes)]
        """
        png = cls()
        png.chunks = chunks
        png.init()
        return png
    def to_bytes(self):
        """Convert the entire image to bytes.

        :rtype: bytes
        """
        parts = [PNG_SIGN]
        parts.extend(raw for _, raw in self.chunks)
        return b"".join(parts)
    def save(self, file):
        """Save the entire image to a file.

        :arg file: Output file.
        :type file: path-like or file-like
        """
        write_file(file, self.to_bytes())
class FrameControl:
    """A data class holding fcTL (frame control) info."""
    def __init__(self, width=None, height=None, x_offset=0, y_offset=0,
            delay=100, delay_den=1000, depose_op=1, blend_op=0):
        """Parameters are assigned as object members. See
        `https://wiki.mozilla.org/APNG_Specification
        <https://wiki.mozilla.org/APNG_Specification#.60fcTL.60:_The_Frame_Control_Chunk>`_
        for the detail of fcTL.
        """
        # width/height default to None so APNG.append can fill them in
        # from the frame's own PNG dimensions.
        self.width = width
        self.height = height
        self.x_offset = x_offset
        self.y_offset = y_offset
        self.delay = delay
        self.delay_den = delay_den
        self.depose_op = depose_op
        self.blend_op = blend_op
    def to_bytes(self):
        """Pack the fcTL fields into 22 bytes, excluding the sequence
        number and CRC.

        :rtype: bytes
        """
        return struct.pack(
            "!IIIIHHbb", self.width, self.height, self.x_offset, self.y_offset,
            self.delay, self.delay_den, self.depose_op, self.blend_op
        )
    @classmethod
    def from_bytes(cls, b):
        """Construct fcTL info from bytes.

        :arg bytes b: The length of ``b`` must be *22* -- the fcTL chunk
            data excluding the sequence number and CRC.
        """
        return cls(*struct.unpack("!IIIIHHbb", b))
class APNG:
    """Represent an APNG image."""
    def __init__(self, num_plays=0):
        """An :class:`APNG` is composed by multiple :class:`PNG` s and
        :class:`FrameControl`, which can be inserted with :meth:`append`.
        :arg int num_plays: Number of times to loop. 0 = infinite.
        :var frames: The frames of APNG.
        :vartype frames: list[tuple(PNG, FrameControl)]
        :var int num_plays: same as ``num_plays``.
        """
        self.frames = []
        self.num_plays = num_plays
    def append(self, png, **options):
        """Append one frame.
        :arg PNG png: Append a :class:`PNG` as a frame.
        :arg dict options: The options for :class:`FrameControl`.
        """
        if not isinstance(png, PNG):
            raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
        control = FrameControl(**options)
        # Default the frame size to the PNG's own size when not supplied.
        if control.width is None:
            control.width = png.width
        if control.height is None:
            control.height = png.height
        self.frames.append((png, control))
    def append_file(self, file, **options):
        """Create a PNG from file and append the PNG as a frame.
        :arg file: Input file.
        :type file: path-like or file-like.
        :arg dict options: The options for :class:`FrameControl`.
        """
        self.append(PNG.open_any(file), **options)
    def to_bytes(self):
        """Convert the entire image to bytes.
        :rtype: bytes
        """
        # grab the chunks we needs
        out = [PNG_SIGN]
        # FIXME: it's tricky to define "other_chunks". HoneyView stop the
        # animation if it sees chunks other than fctl or idat, so we put other
        # chunks to the end of the file
        other_chunks = []
        # Single sequence counter shared by fcTL and fdAT chunks, as the
        # APNG spec numbers them in one combined sequence.
        seq = 0
        # for first frame
        png, control = self.frames[0]
        # header
        out.append(png.hdr)
        # acTL
        out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
        # fcTL
        # NOTE(review): a falsy control here merely omits the first fcTL;
        # presumably that makes the first image a non-animated default image.
        if control:
            out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
            seq += 1
        # and others...
        idat_chunks = []
        for type_, data in png.chunks:
            if type_ in ("IHDR", "IEND"):
                continue
            if type_ == "IDAT":
                # put at last
                idat_chunks.append(data)
                continue
            out.append(data)
        out.extend(idat_chunks)
        # FIXME: we should do some optimization to frames...
        # for other frames
        for png, control in self.frames[1:]:
            # fcTL
            out.append(
                make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
            )
            seq += 1
            # and others...
            for type_, data in png.chunks:
                if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
                    continue
                elif type_ == "IDAT":
                    # convert IDAT to fdAT
                    # data[8:-4] strips the 8-byte length/type prefix and the
                    # 4-byte CRC, leaving just the chunk data to re-wrap.
                    out.append(
                        make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
                    )
                    seq += 1
                else:
                    other_chunks.append(data)
        # end
        out.extend(other_chunks)
        # `png` here is the last frame's PNG (or the first if only one frame).
        out.append(png.end)
        return b"".join(out)
    @classmethod
    def from_files(cls, files, **options):
        """Create an APNG from multiple files.
        This is a shortcut of::
            im = APNG()
            for file in files:
                im.append_file(file, **options)
        :arg list files: A list of filename. See :meth:`PNG.open`.
        :arg dict options: Options for :class:`FrameControl`.
        :rtype: APNG
        """
        im = cls()
        for file in files:
            im.append_file(file, **options)
        return im
    @classmethod
    def from_bytes(cls, b):
        """Create an APNG from raw bytes.
        :arg bytes b: The raw bytes of the APNG file.
        :rtype: APNG
        """
        hdr = None
        # Chunks that must precede IDAT (palette, transparency, ...); they are
        # collected once and replayed into every frame's chunk list.
        head_chunks = []
        # Pre-built IEND tuple shared as the trailer of every frame.
        end = ("IEND", make_chunk("IEND", b""))
        frame_chunks = []
        frames = []
        num_plays = 0
        frame_has_head_chunks = False
        control = None
        for type_, data in parse_chunks(b):
            if type_ == "IHDR":
                hdr = data
                frame_chunks.append((type_, data))
            elif type_ == "acTL":
                # data[8:-4] is the chunk data: num_frames, num_plays.
                _num_frames, num_plays = struct.unpack("!II", data[8:-4])
                continue
            elif type_ == "fcTL":
                # The generator expression has its own scope in Python 3, so
                # it does not clobber this loop's type_/data variables.
                if any(type_ == "IDAT" for type_, data in frame_chunks):
                    # IDAT inside chunk, go to next frame
                    frame_chunks.append(end)
                    frames.append((PNG.from_chunks(frame_chunks), control))
                    frame_has_head_chunks = False
                    # data[12:-4] skips length/type prefix plus the 4-byte
                    # sequence number, and drops the trailing CRC.
                    control = FrameControl.from_bytes(data[12:-4])
                    # https://github.com/PyCQA/pylint/issues/2072
                    # pylint: disable=typecheck
                    # Synthesize a per-frame IHDR: the frame's own size
                    # followed by the remaining fields of the original IHDR.
                    hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
                    frame_chunks = [("IHDR", hdr)]
                else:
                    control = FrameControl.from_bytes(data[12:-4])
            elif type_ == "IDAT":
                # Replay the shared head chunks into this frame exactly once.
                if not frame_has_head_chunks:
                    frame_chunks.extend(head_chunks)
                    frame_has_head_chunks = True
                frame_chunks.append((type_, data))
            elif type_ == "fdAT":
                # convert to IDAT
                if not frame_has_head_chunks:
                    frame_chunks.extend(head_chunks)
                    frame_has_head_chunks = True
                # Strip the 4-byte sequence number and rebuild as plain IDAT.
                frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
            elif type_ == "IEND":
                # end
                frame_chunks.append(end)
                frames.append((PNG.from_chunks(frame_chunks), control))
                break
            elif type_ in CHUNK_BEFORE_IDAT:
                head_chunks.append((type_, data))
            else:
                frame_chunks.append((type_, data))
        o = cls()
        o.frames = frames
        o.num_plays = num_plays
        return o
    @classmethod
    def open(cls, file):
        """Open an APNG file.
        :arg file: Input file.
        :type file: path-like or file-like.
        :rtype: APNG
        """
        return cls.from_bytes(read_file(file))
    def save(self, file):
        """Save the entire image to a file.
        :arg file: Output file.
        :type file: path-like or file-like
        """
        write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
write_file
|
python
|
def write_file(file, b):
if hasattr(file, "write_bytes"):
file.write_bytes(b)
elif hasattr(file, "write"):
file.write(b)
else:
with open(file, "wb") as f:
f.write(b)
|
Write ``b`` to file ``file``.
:arg file type: path-like or file-like object.
:arg bytes b: The content.
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L126-L138
| null |
#! python3
"""This is an APNG module, which can create apng file from pngs
Reference:
http://littlesvr.ca/apng/
http://wiki.mozilla.org/APNG_Specification
https://www.w3.org/TR/PNG/
"""
import struct
import binascii
import io
import zlib
from collections import namedtuple
__version__ = "0.3.3"
PNG_SIGN = b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"
# http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html#C.Summary-of-standard-chunks
CHUNK_BEFORE_IDAT = {
"cHRM", "gAMA", "iCCP", "sBIT", "sRGB", "bKGD", "hIST", "tRNS", "pHYs",
"sPLT", "tIME", "PLTE"
}
def parse_chunks(b):
"""Parse PNG bytes into multiple chunks.
:arg bytes b: The raw bytes of the PNG file.
:return: A generator yielding :class:`Chunk`.
:rtype: Iterator[Chunk]
"""
# skip signature
i = 8
# yield chunks
while i < len(b):
data_len, = struct.unpack("!I", b[i:i+4])
type_ = b[i+4:i+8].decode("latin-1")
yield Chunk(type_, b[i:i+data_len+12])
i += data_len + 12
def make_chunk(chunk_type, chunk_data):
"""Create a raw chunk by composing chunk type and data. It
calculates chunk length and CRC for you.
:arg str chunk_type: PNG chunk type.
:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.
:rtype: bytes
"""
out = struct.pack("!I", len(chunk_data))
chunk_data = chunk_type.encode("latin-1") + chunk_data
out += chunk_data + struct.pack("!I", binascii.crc32(chunk_data) & 0xffffffff)
return out
def make_text_chunk(
type="tEXt", key="Comment", value="",
compression_flag=0, compression_method=0, lang="", translated_key=""):
"""Create a text chunk with a key value pair.
See https://www.w3.org/TR/PNG/#11textinfo for text chunk information.
Usage:
.. code:: python
from apng import APNG, make_text_chunk
im = APNG.open("file.png")
png, control = im.frames[0]
png.chunks.append(make_text_chunk("tEXt", "Comment", "some text"))
im.save("file.png")
:arg str type: Text chunk type: "tEXt", "zTXt", or "iTXt":
tEXt uses Latin-1 characters.
zTXt uses Latin-1 characters, compressed with zlib.
iTXt uses UTF-8 characters.
:arg str key: The key string, 1-79 characters.
:arg str value: The text value. It would be encoded into
:class:`bytes` and compressed if needed.
:arg int compression_flag: The compression flag for iTXt.
:arg int compression_method: The compression method for zTXt and iTXt.
:arg str lang: The language tag for iTXt.
:arg str translated_key: The translated keyword for iTXt.
:rtype: Chunk
"""
# pylint: disable=redefined-builtin
if type == "tEXt":
data = key.encode("latin-1") + b"\0" + value.encode("latin-1")
elif type == "zTXt":
data = (
key.encode("latin-1") + struct.pack("!xb", compression_method) +
zlib.compress(value.encode("latin-1"))
)
elif type == "iTXt":
data = (
key.encode("latin-1") +
struct.pack("!xbb", compression_flag, compression_method) +
lang.encode("latin-1") + b"\0" +
translated_key.encode("utf-8") + b"\0"
)
if compression_flag:
data += zlib.compress(value.encode("utf-8"))
else:
data += value.encode("utf-8")
else:
raise TypeError("unknown type {!r}".format(type))
return Chunk(type, make_chunk(type, data))
def read_file(file):
"""Read ``file`` into ``bytes``.
:arg file type: path-like or file-like
:rtype: bytes
"""
if hasattr(file, "read"):
return file.read()
if hasattr(file, "read_bytes"):
return file.read_bytes()
with open(file, "rb") as f:
return f.read()
def write_file(file, b):
"""Write ``b`` to file ``file``.
:arg file type: path-like or file-like object.
:arg bytes b: The content.
"""
if hasattr(file, "write_bytes"):
file.write_bytes(b)
elif hasattr(file, "write"):
file.write(b)
else:
with open(file, "wb") as f:
f.write(b)
def open_file(file, mode):
"""Open a file.
:arg file: file-like or path-like object.
:arg str mode: ``mode`` argument for :func:`open`.
"""
if hasattr(file, "read"):
return file
if hasattr(file, "open"):
return file.open(mode)
return open(file, mode)
def file_to_png(fp):
"""Convert an image to PNG format with Pillow.
:arg file-like fp: The image file.
:rtype: bytes
"""
import PIL.Image # pylint: disable=import-error
with io.BytesIO() as dest:
PIL.Image.open(fp).save(dest, "PNG", optimize=True)
return dest.getvalue()
class Chunk(namedtuple("Chunk", ["type", "data"])):
"""A namedtuple to represent the PNG chunk.
:arg str type: The chunk type.
:arg bytes data: The raw bytes of the chunk, including chunk length, type,
data, and CRC.
"""
pass
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
class FrameControl:
"""A data class holding fcTL info."""
def __init__(self, width=None, height=None, x_offset=0, y_offset=0,
delay=100, delay_den=1000, depose_op=1, blend_op=0):
"""Parameters are assigned as object members. See
`https://wiki.mozilla.org/APNG_Specification
<https://wiki.mozilla.org/APNG_Specification#.60fcTL.60:_The_Frame_Control_Chunk>`_
for the detail of fcTL.
"""
self.width = width
self.height = height
self.x_offset = x_offset
self.y_offset = y_offset
self.delay = delay
self.delay_den = delay_den
self.depose_op = depose_op
self.blend_op = blend_op
def to_bytes(self):
"""Convert to bytes.
:rtype: bytes
"""
return struct.pack(
"!IIIIHHbb", self.width, self.height, self.x_offset, self.y_offset,
self.delay, self.delay_den, self.depose_op, self.blend_op
)
@classmethod
def from_bytes(cls, b):
"""Contruct fcTL info from bytes.
:arg bytes b: The length of ``b`` must be *28*, excluding sequence
number and CRC.
"""
return cls(*struct.unpack("!IIIIHHbb", b))
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
open_file
|
python
|
def open_file(file, mode):
if hasattr(file, "read"):
return file
if hasattr(file, "open"):
return file.open(mode)
return open(file, mode)
|
Open a file.
:arg file: file-like or path-like object.
:arg str mode: ``mode`` argument for :func:`open`.
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L140-L150
| null |
#! python3
"""This is an APNG module, which can create apng file from pngs
Reference:
http://littlesvr.ca/apng/
http://wiki.mozilla.org/APNG_Specification
https://www.w3.org/TR/PNG/
"""
import struct
import binascii
import io
import zlib
from collections import namedtuple
__version__ = "0.3.3"
PNG_SIGN = b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"
# http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html#C.Summary-of-standard-chunks
CHUNK_BEFORE_IDAT = {
"cHRM", "gAMA", "iCCP", "sBIT", "sRGB", "bKGD", "hIST", "tRNS", "pHYs",
"sPLT", "tIME", "PLTE"
}
def parse_chunks(b):
"""Parse PNG bytes into multiple chunks.
:arg bytes b: The raw bytes of the PNG file.
:return: A generator yielding :class:`Chunk`.
:rtype: Iterator[Chunk]
"""
# skip signature
i = 8
# yield chunks
while i < len(b):
data_len, = struct.unpack("!I", b[i:i+4])
type_ = b[i+4:i+8].decode("latin-1")
yield Chunk(type_, b[i:i+data_len+12])
i += data_len + 12
def make_chunk(chunk_type, chunk_data):
"""Create a raw chunk by composing chunk type and data. It
calculates chunk length and CRC for you.
:arg str chunk_type: PNG chunk type.
:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.
:rtype: bytes
"""
out = struct.pack("!I", len(chunk_data))
chunk_data = chunk_type.encode("latin-1") + chunk_data
out += chunk_data + struct.pack("!I", binascii.crc32(chunk_data) & 0xffffffff)
return out
def make_text_chunk(
type="tEXt", key="Comment", value="",
compression_flag=0, compression_method=0, lang="", translated_key=""):
"""Create a text chunk with a key value pair.
See https://www.w3.org/TR/PNG/#11textinfo for text chunk information.
Usage:
.. code:: python
from apng import APNG, make_text_chunk
im = APNG.open("file.png")
png, control = im.frames[0]
png.chunks.append(make_text_chunk("tEXt", "Comment", "some text"))
im.save("file.png")
:arg str type: Text chunk type: "tEXt", "zTXt", or "iTXt":
tEXt uses Latin-1 characters.
zTXt uses Latin-1 characters, compressed with zlib.
iTXt uses UTF-8 characters.
:arg str key: The key string, 1-79 characters.
:arg str value: The text value. It would be encoded into
:class:`bytes` and compressed if needed.
:arg int compression_flag: The compression flag for iTXt.
:arg int compression_method: The compression method for zTXt and iTXt.
:arg str lang: The language tag for iTXt.
:arg str translated_key: The translated keyword for iTXt.
:rtype: Chunk
"""
# pylint: disable=redefined-builtin
if type == "tEXt":
data = key.encode("latin-1") + b"\0" + value.encode("latin-1")
elif type == "zTXt":
data = (
key.encode("latin-1") + struct.pack("!xb", compression_method) +
zlib.compress(value.encode("latin-1"))
)
elif type == "iTXt":
data = (
key.encode("latin-1") +
struct.pack("!xbb", compression_flag, compression_method) +
lang.encode("latin-1") + b"\0" +
translated_key.encode("utf-8") + b"\0"
)
if compression_flag:
data += zlib.compress(value.encode("utf-8"))
else:
data += value.encode("utf-8")
else:
raise TypeError("unknown type {!r}".format(type))
return Chunk(type, make_chunk(type, data))
def read_file(file):
"""Read ``file`` into ``bytes``.
:arg file type: path-like or file-like
:rtype: bytes
"""
if hasattr(file, "read"):
return file.read()
if hasattr(file, "read_bytes"):
return file.read_bytes()
with open(file, "rb") as f:
return f.read()
def write_file(file, b):
"""Write ``b`` to file ``file``.
:arg file type: path-like or file-like object.
:arg bytes b: The content.
"""
if hasattr(file, "write_bytes"):
file.write_bytes(b)
elif hasattr(file, "write"):
file.write(b)
else:
with open(file, "wb") as f:
f.write(b)
def file_to_png(fp):
"""Convert an image to PNG format with Pillow.
:arg file-like fp: The image file.
:rtype: bytes
"""
import PIL.Image # pylint: disable=import-error
with io.BytesIO() as dest:
PIL.Image.open(fp).save(dest, "PNG", optimize=True)
return dest.getvalue()
class Chunk(namedtuple("Chunk", ["type", "data"])):
"""A namedtuple to represent the PNG chunk.
:arg str type: The chunk type.
:arg bytes data: The raw bytes of the chunk, including chunk length, type,
data, and CRC.
"""
pass
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
class FrameControl:
"""A data class holding fcTL info."""
def __init__(self, width=None, height=None, x_offset=0, y_offset=0,
delay=100, delay_den=1000, depose_op=1, blend_op=0):
"""Parameters are assigned as object members. See
`https://wiki.mozilla.org/APNG_Specification
<https://wiki.mozilla.org/APNG_Specification#.60fcTL.60:_The_Frame_Control_Chunk>`_
for the detail of fcTL.
"""
self.width = width
self.height = height
self.x_offset = x_offset
self.y_offset = y_offset
self.delay = delay
self.delay_den = delay_den
self.depose_op = depose_op
self.blend_op = blend_op
def to_bytes(self):
"""Convert to bytes.
:rtype: bytes
"""
return struct.pack(
"!IIIIHHbb", self.width, self.height, self.x_offset, self.y_offset,
self.delay, self.delay_den, self.depose_op, self.blend_op
)
@classmethod
def from_bytes(cls, b):
"""Contruct fcTL info from bytes.
:arg bytes b: The length of ``b`` must be *28*, excluding sequence
number and CRC.
"""
return cls(*struct.unpack("!IIIIHHbb", b))
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
file_to_png
|
python
|
def file_to_png(fp):
import PIL.Image # pylint: disable=import-error
with io.BytesIO() as dest:
PIL.Image.open(fp).save(dest, "PNG", optimize=True)
return dest.getvalue()
|
Convert an image to PNG format with Pillow.
:arg file-like fp: The image file.
:rtype: bytes
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L152-L161
| null |
#! python3
"""This is an APNG module, which can create apng file from pngs
Reference:
http://littlesvr.ca/apng/
http://wiki.mozilla.org/APNG_Specification
https://www.w3.org/TR/PNG/
"""
import struct
import binascii
import io
import zlib
from collections import namedtuple
__version__ = "0.3.3"
PNG_SIGN = b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"
# http://www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html#C.Summary-of-standard-chunks
CHUNK_BEFORE_IDAT = {
"cHRM", "gAMA", "iCCP", "sBIT", "sRGB", "bKGD", "hIST", "tRNS", "pHYs",
"sPLT", "tIME", "PLTE"
}
def parse_chunks(b):
"""Parse PNG bytes into multiple chunks.
:arg bytes b: The raw bytes of the PNG file.
:return: A generator yielding :class:`Chunk`.
:rtype: Iterator[Chunk]
"""
# skip signature
i = 8
# yield chunks
while i < len(b):
data_len, = struct.unpack("!I", b[i:i+4])
type_ = b[i+4:i+8].decode("latin-1")
yield Chunk(type_, b[i:i+data_len+12])
i += data_len + 12
def make_chunk(chunk_type, chunk_data):
"""Create a raw chunk by composing chunk type and data. It
calculates chunk length and CRC for you.
:arg str chunk_type: PNG chunk type.
:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.
:rtype: bytes
"""
out = struct.pack("!I", len(chunk_data))
chunk_data = chunk_type.encode("latin-1") + chunk_data
out += chunk_data + struct.pack("!I", binascii.crc32(chunk_data) & 0xffffffff)
return out
def make_text_chunk(
type="tEXt", key="Comment", value="",
compression_flag=0, compression_method=0, lang="", translated_key=""):
"""Create a text chunk with a key value pair.
See https://www.w3.org/TR/PNG/#11textinfo for text chunk information.
Usage:
.. code:: python
from apng import APNG, make_text_chunk
im = APNG.open("file.png")
png, control = im.frames[0]
png.chunks.append(make_text_chunk("tEXt", "Comment", "some text"))
im.save("file.png")
:arg str type: Text chunk type: "tEXt", "zTXt", or "iTXt":
tEXt uses Latin-1 characters.
zTXt uses Latin-1 characters, compressed with zlib.
iTXt uses UTF-8 characters.
:arg str key: The key string, 1-79 characters.
:arg str value: The text value. It would be encoded into
:class:`bytes` and compressed if needed.
:arg int compression_flag: The compression flag for iTXt.
:arg int compression_method: The compression method for zTXt and iTXt.
:arg str lang: The language tag for iTXt.
:arg str translated_key: The translated keyword for iTXt.
:rtype: Chunk
"""
# pylint: disable=redefined-builtin
if type == "tEXt":
data = key.encode("latin-1") + b"\0" + value.encode("latin-1")
elif type == "zTXt":
data = (
key.encode("latin-1") + struct.pack("!xb", compression_method) +
zlib.compress(value.encode("latin-1"))
)
elif type == "iTXt":
data = (
key.encode("latin-1") +
struct.pack("!xbb", compression_flag, compression_method) +
lang.encode("latin-1") + b"\0" +
translated_key.encode("utf-8") + b"\0"
)
if compression_flag:
data += zlib.compress(value.encode("utf-8"))
else:
data += value.encode("utf-8")
else:
raise TypeError("unknown type {!r}".format(type))
return Chunk(type, make_chunk(type, data))
def read_file(file):
"""Read ``file`` into ``bytes``.
:arg file type: path-like or file-like
:rtype: bytes
"""
if hasattr(file, "read"):
return file.read()
if hasattr(file, "read_bytes"):
return file.read_bytes()
with open(file, "rb") as f:
return f.read()
def write_file(file, b):
"""Write ``b`` to file ``file``.
:arg file type: path-like or file-like object.
:arg bytes b: The content.
"""
if hasattr(file, "write_bytes"):
file.write_bytes(b)
elif hasattr(file, "write"):
file.write(b)
else:
with open(file, "wb") as f:
f.write(b)
def open_file(file, mode):
"""Open a file.
:arg file: file-like or path-like object.
:arg str mode: ``mode`` argument for :func:`open`.
"""
if hasattr(file, "read"):
return file
if hasattr(file, "open"):
return file.open(mode)
return open(file, mode)
def file_to_png(fp):
"""Convert an image to PNG format with Pillow.
:arg file-like fp: The image file.
:rtype: bytes
"""
import PIL.Image # pylint: disable=import-error
with io.BytesIO() as dest:
PIL.Image.open(fp).save(dest, "PNG", optimize=True)
return dest.getvalue()
class Chunk(namedtuple("Chunk", ["type", "data"])):
"""A namedtuple to represent the PNG chunk.
:arg str type: The chunk type.
:arg bytes data: The raw bytes of the chunk, including chunk length, type,
data, and CRC.
"""
pass
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
class FrameControl:
"""A data class holding fcTL info."""
def __init__(self, width=None, height=None, x_offset=0, y_offset=0,
delay=100, delay_den=1000, depose_op=1, blend_op=0):
"""Parameters are assigned as object members. See
`https://wiki.mozilla.org/APNG_Specification
<https://wiki.mozilla.org/APNG_Specification#.60fcTL.60:_The_Frame_Control_Chunk>`_
for the detail of fcTL.
"""
self.width = width
self.height = height
self.x_offset = x_offset
self.y_offset = y_offset
self.delay = delay
self.delay_den = delay_den
self.depose_op = depose_op
self.blend_op = blend_op
def to_bytes(self):
"""Convert to bytes.
:rtype: bytes
"""
return struct.pack(
"!IIIIHHbb", self.width, self.height, self.x_offset, self.y_offset,
self.delay, self.delay_den, self.depose_op, self.blend_op
)
@classmethod
def from_bytes(cls, b):
"""Contruct fcTL info from bytes.
:arg bytes b: The length of ``b`` must be *28*, excluding sequence
number and CRC.
"""
return cls(*struct.unpack("!IIIIHHbb", b))
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
PNG.init
|
python
|
def init(self):
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
|
Extract some info from chunks
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L185-L195
| null |
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
PNG.open_any
|
python
|
def open_any(cls, file):
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
|
Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L208-L224
|
[
"def open_file(file, mode):\n\t\"\"\"Open a file.\n\n\t:arg file: file-like or path-like object.\n\t:arg str mode: ``mode`` argument for :func:`open`.\n\t\"\"\"\n\tif hasattr(file, \"read\"):\n\t\treturn file\n\tif hasattr(file, \"open\"):\n\t\treturn file.open(mode)\n\treturn open(file, mode)\n",
"def from_bytes(cls, b):\n\t\"\"\"Create :class:`PNG` from raw bytes.\n\n\t:arg bytes b: The raw bytes of the PNG file.\n\t:rtype: :class:`PNG`\n\t\"\"\"\n\tim = cls()\n\tim.chunks = list(parse_chunks(b))\n\tim.init()\n\treturn im\n"
] |
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
PNG.from_bytes
|
python
|
def from_bytes(cls, b):
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
|
Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L227-L236
|
[
"def parse_chunks(b):\n\t\"\"\"Parse PNG bytes into multiple chunks. \n\n\t:arg bytes b: The raw bytes of the PNG file.\n\t:return: A generator yielding :class:`Chunk`.\n\t:rtype: Iterator[Chunk]\n\t\"\"\"\n\t# skip signature\n\ti = 8\n\t# yield chunks\n\twhile i < len(b):\n\t\tdata_len, = struct.unpack(\"!I\", b[i:i+4])\n\t\ttype_ = b[i+4:i+8].decode(\"latin-1\")\n\t\tyield Chunk(type_, b[i:i+data_len+12])\n\t\ti += data_len + 12\n",
"def init(self):\n\t\"\"\"Extract some info from chunks\"\"\"\n\tfor type_, data in self.chunks:\n\t\tif type_ == \"IHDR\":\n\t\t\tself.hdr = data\n\t\telif type_ == \"IEND\":\n\t\t\tself.end = data\n\n\tif self.hdr:\n\t\t# grab w, h info\n\t\tself.width, self.height = struct.unpack(\"!II\", self.hdr[8:16])\n"
] |
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
PNG.from_chunks
|
python
|
def from_chunks(cls, chunks):
im = cls()
im.chunks = chunks
im.init()
return im
|
Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L239-L249
| null |
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
PNG.to_bytes
|
python
|
def to_bytes(self):
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
|
Convert the entire image to bytes.
:rtype: bytes
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L252-L259
| null |
class PNG:
"""Represent a PNG image.
"""
def __init__(self):
self.hdr = None
self.end = None
self.width = None
self.height = None
self.chunks = []
"""A list of :class:`Chunk`. After reading a PNG file, the bytes
are parsed into multiple chunks. You can remove/add chunks into
this array before calling :func:`to_bytes`."""
def init(self):
"""Extract some info from chunks"""
for type_, data in self.chunks:
if type_ == "IHDR":
self.hdr = data
elif type_ == "IEND":
self.end = data
if self.hdr:
# grab w, h info
self.width, self.height = struct.unpack("!II", self.hdr[8:16])
@classmethod
def open(cls, file):
"""Open a PNG file.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
return cls.from_bytes(read_file(file))
@classmethod
def open_any(cls, file):
"""Open an image file. If the image is not PNG format, it would convert
the image into PNG with Pillow module. If the module is not
installed, :class:`ImportError` would be raised.
:arg file: Input file.
:type file: path-like or file-like
:rtype: :class:`PNG`
"""
with open_file(file, "rb") as f:
header = f.read(8)
f.seek(0)
if header != PNG_SIGN:
b = file_to_png(f)
else:
b = f.read()
return cls.from_bytes(b)
@classmethod
def from_bytes(cls, b):
"""Create :class:`PNG` from raw bytes.
:arg bytes b: The raw bytes of the PNG file.
:rtype: :class:`PNG`
"""
im = cls()
im.chunks = list(parse_chunks(b))
im.init()
return im
@classmethod
def from_chunks(cls, chunks):
"""Construct PNG from raw chunks.
:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see
:func:`chunks`.
:type chunks: list[tuple(str, bytes)]
"""
im = cls()
im.chunks = chunks
im.init()
return im
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
chunks = [PNG_SIGN]
chunks.extend(c[1] for c in self.chunks)
return b"".join(chunks)
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
FrameControl.to_bytes
|
python
|
def to_bytes(self):
return struct.pack(
"!IIIIHHbb", self.width, self.height, self.x_offset, self.y_offset,
self.delay, self.delay_den, self.depose_op, self.blend_op
)
|
Convert to bytes.
:rtype: bytes
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L287-L295
| null |
class FrameControl:
    """A data class holding APNG fcTL (frame control) info."""

    def __init__(self, width=None, height=None, x_offset=0, y_offset=0,
                 delay=100, delay_den=1000, depose_op=1, blend_op=0):
        """Parameters are assigned as object members.  See
        `https://wiki.mozilla.org/APNG_Specification
        <https://wiki.mozilla.org/APNG_Specification#.60fcTL.60:_The_Frame_Control_Chunk>`_
        for the detail of fcTL.
        """
        self.width = width
        self.height = height
        self.x_offset = x_offset
        self.y_offset = y_offset
        self.delay = delay
        self.delay_den = delay_den
        # NOTE: the spec calls this field "dispose_op"; the (misspelled)
        # attribute name is kept for backward compatibility with callers.
        self.depose_op = depose_op
        self.blend_op = blend_op

    def to_bytes(self):
        """Serialize to the 22-byte fcTL payload, excluding sequence number
        and CRC.

        :rtype: bytes
        """
        # dispose_op and blend_op are *unsigned* bytes per the APNG spec,
        # hence "BB" rather than the signed "bb".
        return struct.pack(
            "!IIIIHHBB", self.width, self.height, self.x_offset,
            self.y_offset, self.delay, self.delay_den, self.depose_op,
            self.blend_op
        )

    @classmethod
    def from_bytes(cls, b):
        """Construct fcTL info from bytes.

        :arg bytes b: The length of ``b`` must be *22*, excluding sequence
            number and CRC.
        """
        return cls(*struct.unpack("!IIIIHHBB", b))
|
eight04/pyAPNG
|
apng/__init__.py
|
APNG.append
|
python
|
def append(self, png, **options):
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
|
Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L321-L334
| null |
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
# Default the frame's size to the PNG's own size when not supplied.
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
# `seq` is the APNG sequence number shared by fcTL and fdAT chunks.
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
# IDAT chunks of the first frame are deferred so that every non-IDAT
# chunk (palette, gamma, ...) precedes the image data.
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
# fdAT payload = 4-byte sequence number + raw IDAT data
# (data[8:-4] strips the chunk length/type and CRC).
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
# State accumulated while walking the chunk stream:
#   hdr          -- raw IHDR chunk of the frame being built
#   head_chunks  -- chunks shared by every frame (those in CHUNK_BEFORE_IDAT)
#   frame_chunks -- chunks of the frame currently being assembled
#   frames       -- completed (PNG, FrameControl) pairs
#   control      -- fcTL info of the frame currently being assembled
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
# animation control: frame count (unused here) and loop count
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
# Rebuild IHDR with this frame's own width/height so the
# extracted frame is a standalone, valid PNG.
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
# data[12:-4] drops length/type, sequence number, and CRC.
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
APNG.append_file
|
python
|
def append_file(self, file, **options):
self.append(PNG.open_any(file), **options)
|
Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L336-L343
|
[
"def open_any(cls, file):\n\t\"\"\"Open an image file. If the image is not PNG format, it would convert\n\tthe image into PNG with Pillow module. If the module is not\n\tinstalled, :class:`ImportError` would be raised.\n\n\t:arg file: Input file.\n\t:type file: path-like or file-like\n\t:rtype: :class:`PNG`\n\t\"\"\"\n\twith open_file(file, \"rb\") as f:\n\t\theader = f.read(8)\n\t\tf.seek(0)\n\t\tif header != PNG_SIGN:\n\t\t\tb = file_to_png(f)\n\t\telse:\n\t\t\tb = f.read()\n\treturn cls.from_bytes(b)\n",
"def append(self, png, **options):\n\t\"\"\"Append one frame.\n\n\t:arg PNG png: Append a :class:`PNG` as a frame.\n\t:arg dict options: The options for :class:`FrameControl`.\n\t\"\"\"\n\tif not isinstance(png, PNG):\n\t\traise TypeError(\"Expect an instance of `PNG` but got `{}`\".format(png))\n\tcontrol = FrameControl(**options)\n\tif control.width is None:\n\t\tcontrol.width = png.width\n\tif control.height is None:\n\t\tcontrol.height = png.height\n\tself.frames.append((png, control))\n"
] |
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
APNG.to_bytes
|
python
|
def to_bytes(self):
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
|
Convert the entire image to bytes.
:rtype: bytes
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L345-L411
|
[
"def make_chunk(chunk_type, chunk_data):\n\t\"\"\"Create a raw chunk by composing chunk type and data. It\n\tcalculates chunk length and CRC for you.\n\n\t:arg str chunk_type: PNG chunk type.\n\t:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.\n\t:rtype: bytes\n\t\"\"\"\n\tout = struct.pack(\"!I\", len(chunk_data))\n\tchunk_data = chunk_type.encode(\"latin-1\") + chunk_data\n\tout += chunk_data + struct.pack(\"!I\", binascii.crc32(chunk_data) & 0xffffffff)\n\treturn out\n"
] |
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
APNG.from_files
|
python
|
def from_files(cls, files, **options):
im = cls()
for file in files:
im.append_file(file, **options)
return im
|
Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L414-L430
| null |
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
apng/__init__.py
|
APNG.from_bytes
|
python
|
def from_bytes(cls, b):
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
|
Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/apng/__init__.py#L433-L494
|
[
"def parse_chunks(b):\n\t\"\"\"Parse PNG bytes into multiple chunks. \n\n\t:arg bytes b: The raw bytes of the PNG file.\n\t:return: A generator yielding :class:`Chunk`.\n\t:rtype: Iterator[Chunk]\n\t\"\"\"\n\t# skip signature\n\ti = 8\n\t# yield chunks\n\twhile i < len(b):\n\t\tdata_len, = struct.unpack(\"!I\", b[i:i+4])\n\t\ttype_ = b[i+4:i+8].decode(\"latin-1\")\n\t\tyield Chunk(type_, b[i:i+data_len+12])\n\t\ti += data_len + 12\n",
"def make_chunk(chunk_type, chunk_data):\n\t\"\"\"Create a raw chunk by composing chunk type and data. It\n\tcalculates chunk length and CRC for you.\n\n\t:arg str chunk_type: PNG chunk type.\n\t:arg bytes chunk_data: PNG chunk data, **excluding chunk length, type, and CRC**.\n\t:rtype: bytes\n\t\"\"\"\n\tout = struct.pack(\"!I\", len(chunk_data))\n\tchunk_data = chunk_type.encode(\"latin-1\") + chunk_data\n\tout += chunk_data + struct.pack(\"!I\", binascii.crc32(chunk_data) & 0xffffffff)\n\treturn out\n",
"def from_chunks(cls, chunks):\n\t\"\"\"Construct PNG from raw chunks.\n\n\t:arg chunks: A list of ``(chunk_type, chunk_raw_data)``. Also see\n\t\t:func:`chunks`.\n\t:type chunks: list[tuple(str, bytes)]\n\t\"\"\"\n\tim = cls()\n\tim.chunks = chunks\n\tim.init()\n\treturn im\n",
"def from_bytes(cls, b):\n\t\"\"\"Contruct fcTL info from bytes.\n\n\t:arg bytes b: The length of ``b`` must be *28*, excluding sequence\n\t\tnumber and CRC.\n\t\"\"\"\n\treturn cls(*struct.unpack(\"!IIIIHHbb\", b))\n"
] |
class APNG:
"""Represent an APNG image."""
def __init__(self, num_plays=0):
"""An :class:`APNG` is composed by multiple :class:`PNG` s and
:class:`FrameControl`, which can be inserted with :meth:`append`.
:arg int num_plays: Number of times to loop. 0 = infinite.
:var frames: The frames of APNG.
:vartype frames: list[tuple(PNG, FrameControl)]
:var int num_plays: same as ``num_plays``.
"""
self.frames = []
self.num_plays = num_plays
def append(self, png, **options):
"""Append one frame.
:arg PNG png: Append a :class:`PNG` as a frame.
:arg dict options: The options for :class:`FrameControl`.
"""
if not isinstance(png, PNG):
raise TypeError("Expect an instance of `PNG` but got `{}`".format(png))
control = FrameControl(**options)
if control.width is None:
control.width = png.width
if control.height is None:
control.height = png.height
self.frames.append((png, control))
def append_file(self, file, **options):
"""Create a PNG from file and append the PNG as a frame.
:arg file: Input file.
:type file: path-like or file-like.
:arg dict options: The options for :class:`FrameControl`.
"""
self.append(PNG.open_any(file), **options)
def to_bytes(self):
"""Convert the entire image to bytes.
:rtype: bytes
"""
# grab the chunks we needs
out = [PNG_SIGN]
# FIXME: it's tricky to define "other_chunks". HoneyView stop the
# animation if it sees chunks other than fctl or idat, so we put other
# chunks to the end of the file
other_chunks = []
seq = 0
# for first frame
png, control = self.frames[0]
# header
out.append(png.hdr)
# acTL
out.append(make_chunk("acTL", struct.pack("!II", len(self.frames), self.num_plays)))
# fcTL
if control:
out.append(make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes()))
seq += 1
# and others...
idat_chunks = []
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND"):
continue
if type_ == "IDAT":
# put at last
idat_chunks.append(data)
continue
out.append(data)
out.extend(idat_chunks)
# FIXME: we should do some optimization to frames...
# for other frames
for png, control in self.frames[1:]:
# fcTL
out.append(
make_chunk("fcTL", struct.pack("!I", seq) + control.to_bytes())
)
seq += 1
# and others...
for type_, data in png.chunks:
if type_ in ("IHDR", "IEND") or type_ in CHUNK_BEFORE_IDAT:
continue
elif type_ == "IDAT":
# convert IDAT to fdAT
out.append(
make_chunk("fdAT", struct.pack("!I", seq) + data[8:-4])
)
seq += 1
else:
other_chunks.append(data)
# end
out.extend(other_chunks)
out.append(png.end)
return b"".join(out)
@classmethod
def from_files(cls, files, **options):
"""Create an APNG from multiple files.
This is a shortcut of::
im = APNG()
for file in files:
im.append_file(file, **options)
:arg list files: A list of filename. See :meth:`PNG.open`.
:arg dict options: Options for :class:`FrameControl`.
:rtype: APNG
"""
im = cls()
for file in files:
im.append_file(file, **options)
return im
@classmethod
def from_bytes(cls, b):
"""Create an APNG from raw bytes.
:arg bytes b: The raw bytes of the APNG file.
:rtype: APNG
"""
hdr = None
head_chunks = []
end = ("IEND", make_chunk("IEND", b""))
frame_chunks = []
frames = []
num_plays = 0
frame_has_head_chunks = False
control = None
for type_, data in parse_chunks(b):
if type_ == "IHDR":
hdr = data
frame_chunks.append((type_, data))
elif type_ == "acTL":
_num_frames, num_plays = struct.unpack("!II", data[8:-4])
continue
elif type_ == "fcTL":
if any(type_ == "IDAT" for type_, data in frame_chunks):
# IDAT inside chunk, go to next frame
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
frame_has_head_chunks = False
control = FrameControl.from_bytes(data[12:-4])
# https://github.com/PyCQA/pylint/issues/2072
# pylint: disable=typecheck
hdr = make_chunk("IHDR", struct.pack("!II", control.width, control.height) + hdr[16:-4])
frame_chunks = [("IHDR", hdr)]
else:
control = FrameControl.from_bytes(data[12:-4])
elif type_ == "IDAT":
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append((type_, data))
elif type_ == "fdAT":
# convert to IDAT
if not frame_has_head_chunks:
frame_chunks.extend(head_chunks)
frame_has_head_chunks = True
frame_chunks.append(("IDAT", make_chunk("IDAT", data[12:-4])))
elif type_ == "IEND":
# end
frame_chunks.append(end)
frames.append((PNG.from_chunks(frame_chunks), control))
break
elif type_ in CHUNK_BEFORE_IDAT:
head_chunks.append((type_, data))
else:
frame_chunks.append((type_, data))
o = cls()
o.frames = frames
o.num_plays = num_plays
return o
@classmethod
def open(cls, file):
"""Open an APNG file.
:arg file: Input file.
:type file: path-like or file-like.
:rtype: APNG
"""
return cls.from_bytes(read_file(file))
def save(self, file):
"""Save the entire image to a file.
:arg file: Output file.
:type file: path-like or file-like
"""
write_file(file, self.to_bytes())
|
eight04/pyAPNG
|
cute.py
|
readme
|
python
|
def readme():
from livereload import Server
server = Server()
server.watch("README.rst", "py cute.py readme_build")
server.serve(open_url_delay=1, root="build/readme")
|
Live reload readme
|
train
|
https://github.com/eight04/pyAPNG/blob/b4d2927f7892a1de967b5cf57d434ed65f6a017e/cute.py#L6-L11
| null |
# https://github.com/PyCQA/pylint/issues/1368
# pylint: disable=bad-whitespace
import sys
from xcute import cute, Skip
IS_LATEST = sys.version_info[:2] == (3, 6)
cute(
pkg_name = "apng",
lint = Skip("pylint cute.py test apng", sys.version_info < (3, )),
test = [
"lint",
"pytest -x test",
"readme_build"
],
bump_pre = 'test',
bump_post = ['dist', 'release', 'publish', 'install'],
clean = "x-clean build dist",
dist = [
"clean",
"python setup.py sdist bdist_wheel"
],
release = [
'git add .',
'git commit -m "Release v{version}"',
'git tag -a v{version} -m "Release v{version}"'
],
publish = [
'twine upload dist/*',
'git push --follow-tags'
],
install = 'python -m pip install -e .',
readme_build = [
'python setup.py --long-description | x-pipe build/readme/index.rst',
'rst2html5.py --no-raw --exit-status=1 --verbose '
'build/readme/index.rst build/readme/index.html'
],
readme_pre = "readme_build",
readme = readme,
doc = 'sphinx-autobuild -B -z {pkg_name} docs docs/build'
)
|
ui/django-post_office
|
post_office/models.py
|
get_upload_path
|
python
|
def get_upload_path(instance, filename):
if not instance.name:
instance.name = filename # set original filename
date = timezone.now().date()
filename = '{name}.{ext}'.format(name=uuid4().hex,
ext=filename.split('.')[-1])
return os.path.join('post_office_attachments', str(date.year),
str(date.month), str(date.day), filename)
|
Overriding to store the original filename
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/models.py#L274-L283
| null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from collections import namedtuple
from uuid import uuid4
from email.mime.nonmultipart import MIMENonMultipart
from django.core.mail import EmailMessage, EmailMultiAlternatives
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import pgettext_lazy, ugettext_lazy as _
from django.utils import timezone
from jsonfield import JSONField
from post_office import cache
from post_office.fields import CommaSeparatedEmailField
from .compat import text_type, smart_text
from .connections import connections
from .settings import context_field_class, get_log_level, get_template_engine
from .validators import validate_email_with_name, validate_template_syntax
PRIORITY = namedtuple('PRIORITY', 'low medium high now')._make(range(4))
STATUS = namedtuple('STATUS', 'sent failed queued')._make(range(3))
@python_2_unicode_compatible
class Email(models.Model):
"""
A model to hold email information.
"""
PRIORITY_CHOICES = [(PRIORITY.low, _("low")), (PRIORITY.medium, _("medium")),
(PRIORITY.high, _("high")), (PRIORITY.now, _("now"))]
STATUS_CHOICES = [(STATUS.sent, _("sent")), (STATUS.failed, _("failed")),
(STATUS.queued, _("queued"))]
from_email = models.CharField(_("Email From"), max_length=254,
validators=[validate_email_with_name])
to = CommaSeparatedEmailField(_("Email To"))
cc = CommaSeparatedEmailField(_("Cc"))
bcc = CommaSeparatedEmailField(_("Bcc"))
subject = models.CharField(_("Subject"), max_length=989, blank=True)
message = models.TextField(_("Message"), blank=True)
html_message = models.TextField(_("HTML Message"), blank=True)
"""
Emails with 'queued' status will get processed by ``send_queued`` command.
Status field will then be set to ``failed`` or ``sent`` depending on
whether it's successfully delivered.
"""
status = models.PositiveSmallIntegerField(
_("Status"),
choices=STATUS_CHOICES, db_index=True,
blank=True, null=True)
priority = models.PositiveSmallIntegerField(_("Priority"),
choices=PRIORITY_CHOICES,
blank=True, null=True)
created = models.DateTimeField(auto_now_add=True, db_index=True)
last_updated = models.DateTimeField(db_index=True, auto_now=True)
scheduled_time = models.DateTimeField(_('The scheduled sending time'),
blank=True, null=True, db_index=True)
headers = JSONField(_('Headers'), blank=True, null=True)
template = models.ForeignKey('post_office.EmailTemplate', blank=True,
null=True, verbose_name=_('Email template'),
on_delete=models.CASCADE)
context = context_field_class(_('Context'), blank=True, null=True)
backend_alias = models.CharField(_('Backend alias'), blank=True, default='',
max_length=64)
class Meta:
app_label = 'post_office'
verbose_name = pgettext_lazy("Email address", "Email")
verbose_name_plural = pgettext_lazy("Email addresses", "Emails")
def __init__(self, *args, **kwargs):
super(Email, self).__init__(*args, **kwargs)
self._cached_email_message = None
def __str__(self):
return u'%s' % self.to
def email_message(self):
"""
Returns Django EmailMessage object for sending.
"""
if self._cached_email_message:
return self._cached_email_message
return self.prepare_email_message()
def prepare_email_message(self):
"""
Returns a django ``EmailMessage`` or ``EmailMultiAlternatives`` object,
depending on whether html_message is empty.
"""
if self.template is not None:
engine = get_template_engine()
subject = engine.from_string(self.template.subject).render(self.context)
plaintext_message = engine.from_string(self.template.content).render(self.context)
multipart_template = engine.from_string(self.template.html_content)
html_message = multipart_template.render(self.context)
else:
subject = smart_text(self.subject)
plaintext_message = self.message
multipart_template = None
html_message = self.html_message
connection = connections[self.backend_alias or 'default']
if html_message:
if plaintext_message:
msg = EmailMultiAlternatives(
subject=subject, body=plaintext_message, from_email=self.from_email,
to=self.to, bcc=self.bcc, cc=self.cc,
headers=self.headers, connection=connection)
msg.attach_alternative(html_message, "text/html")
else:
msg = EmailMultiAlternatives(
subject=subject, body=html_message, from_email=self.from_email,
to=self.to, bcc=self.bcc, cc=self.cc,
headers=self.headers, connection=connection)
msg.content_subtype = 'html'
if hasattr(multipart_template, 'attach_related'):
multipart_template.attach_related(msg)
else:
msg = EmailMessage(
subject=subject, body=plaintext_message, from_email=self.from_email,
to=self.to, bcc=self.bcc, cc=self.cc,
headers=self.headers, connection=connection)
for attachment in self.attachments.all():
if attachment.headers:
mime_part = MIMENonMultipart(*attachment.mimetype.split('/'))
mime_part.set_payload(attachment.file.read())
for key, val in attachment.headers.items():
try:
mime_part.replace_header(key, val)
except KeyError:
mime_part.add_header(key, val)
msg.attach(mime_part)
else:
msg.attach(attachment.name, attachment.file.read(), mimetype=attachment.mimetype or None)
attachment.file.close()
self._cached_email_message = msg
return msg
def dispatch(self, log_level=None,
disconnect_after_delivery=True, commit=True):
"""
Sends email and log the result.
"""
try:
self.email_message().send()
status = STATUS.sent
message = ''
exception_type = ''
except Exception as e:
status = STATUS.failed
message = str(e)
exception_type = type(e).__name__
# If run in a bulk sending mode, reraise and let the outer
# layer handle the exception
if not commit:
raise
if commit:
self.status = status
self.save(update_fields=['status'])
if log_level is None:
log_level = get_log_level()
# If log level is 0, log nothing, 1 logs only sending failures
# and 2 means log both successes and failures
if log_level == 1:
if status == STATUS.failed:
self.logs.create(status=status, message=message,
exception_type=exception_type)
elif log_level == 2:
self.logs.create(status=status, message=message,
exception_type=exception_type)
return status
def save(self, *args, **kwargs):
self.full_clean()
return super(Email, self).save(*args, **kwargs)
@python_2_unicode_compatible
class Log(models.Model):
"""
A model to record sending email sending activities.
"""
STATUS_CHOICES = [(STATUS.sent, _("sent")), (STATUS.failed, _("failed"))]
email = models.ForeignKey(Email, editable=False, related_name='logs',
verbose_name=_('Email address'), on_delete=models.CASCADE)
date = models.DateTimeField(auto_now_add=True)
status = models.PositiveSmallIntegerField(_('Status'), choices=STATUS_CHOICES)
exception_type = models.CharField(_('Exception type'), max_length=255, blank=True)
message = models.TextField(_('Message'))
class Meta:
app_label = 'post_office'
verbose_name = _("Log")
verbose_name_plural = _("Logs")
def __str__(self):
return text_type(self.date)
class EmailTemplateManager(models.Manager):
def get_by_natural_key(self, name, language, default_template):
return self.get(name=name, language=language, default_template=default_template)
@python_2_unicode_compatible
class EmailTemplate(models.Model):
"""
Model to hold template information from db
"""
name = models.CharField(_('Name'), max_length=255, help_text=_("e.g: 'welcome_email'"))
description = models.TextField(_('Description'), blank=True,
help_text=_("Description of this template."))
created = models.DateTimeField(auto_now_add=True)
last_updated = models.DateTimeField(auto_now=True)
subject = models.CharField(max_length=255, blank=True,
verbose_name=_("Subject"), validators=[validate_template_syntax])
content = models.TextField(blank=True,
verbose_name=_("Content"), validators=[validate_template_syntax])
html_content = models.TextField(blank=True,
verbose_name=_("HTML content"), validators=[validate_template_syntax])
language = models.CharField(max_length=12,
verbose_name=_("Language"),
help_text=_("Render template in alternative language"),
default='', blank=True)
default_template = models.ForeignKey('self', related_name='translated_templates',
null=True, default=None, verbose_name=_('Default template'), on_delete=models.CASCADE)
objects = EmailTemplateManager()
class Meta:
app_label = 'post_office'
unique_together = ('name', 'language', 'default_template')
verbose_name = _("Email Template")
verbose_name_plural = _("Email Templates")
ordering = ['name']
def __str__(self):
return u'%s %s' % (self.name, self.language)
def natural_key(self):
return (self.name, self.language, self.default_template)
def save(self, *args, **kwargs):
# If template is a translation, use default template's name
if self.default_template and not self.name:
self.name = self.default_template.name
template = super(EmailTemplate, self).save(*args, **kwargs)
cache.delete(self.name)
return template
@python_2_unicode_compatible
class Attachment(models.Model):
"""
A model describing an email attachment.
"""
file = models.FileField(_('File'), upload_to=get_upload_path)
name = models.CharField(_('Name'), max_length=255, help_text=_("The original filename"))
emails = models.ManyToManyField(Email, related_name='attachments',
verbose_name=_('Email addresses'))
mimetype = models.CharField(max_length=255, default='', blank=True)
headers = JSONField(_('Headers'), blank=True, null=True)
class Meta:
app_label = 'post_office'
verbose_name = _("Attachment")
verbose_name_plural = _("Attachments")
def __str__(self):
return self.name
|
ui/django-post_office
|
post_office/fields.py
|
CommaSeparatedEmailField.get_prep_value
|
python
|
def get_prep_value(self, value):
if isinstance(value, six.string_types):
return value
else:
return ', '.join(map(lambda s: s.strip(), value))
|
We need to accomodate queries where a single email,
or list of email addresses is supplied as arguments. For example:
- Email.objects.filter(to='mail@example.com')
- Email.objects.filter(to=['one@example.com', 'two@example.com'])
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/fields.py#L28-L39
| null |
class CommaSeparatedEmailField(TextField):
default_validators = [validate_comma_separated_emails]
description = _("Comma-separated emails")
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
super(CommaSeparatedEmailField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'error_messages': {
'invalid': _('Only comma separated emails are allowed.'),
}
}
defaults.update(kwargs)
return super(CommaSeparatedEmailField, self).formfield(**defaults)
def from_db_value(self, value, *args, **kwargs):
return self.to_python(value)
def to_python(self, value):
if isinstance(value, six.string_types):
if value == '':
return []
else:
return [s.strip() for s in value.split(',')]
else:
return value
def south_field_triple(self):
"""
Return a suitable description of this field for South.
Taken from smiley chris' easy_thumbnails
"""
from south.modelsinspector import introspector
field_class = 'django.db.models.fields.TextField'
args, kwargs = introspector(self)
return (field_class, args, kwargs)
|
ui/django-post_office
|
post_office/mail.py
|
create
|
python
|
def create(sender, recipients=None, cc=None, bcc=None, subject='', message='',
html_message='', context=None, scheduled_time=None, headers=None,
template=None, priority=None, render_on_delivery=False, commit=True,
backend=''):
priority = parse_priority(priority)
status = None if priority == PRIORITY.now else STATUS.queued
if recipients is None:
recipients = []
if cc is None:
cc = []
if bcc is None:
bcc = []
if context is None:
context = ''
# If email is to be rendered during delivery, save all necessary
# information
if render_on_delivery:
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
context=context, template=template, backend_alias=backend
)
else:
if template:
subject = template.subject
message = template.content
html_message = template.html_content
_context = Context(context or {})
subject = Template(subject).render(_context)
message = Template(message).render(_context)
html_message = Template(html_message).render(_context)
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
subject=subject,
message=message,
html_message=html_message,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
backend_alias=backend
)
if commit:
email.save()
return email
|
Creates an email from supplied keyword arguments. If template is
specified, email subject and content will be rendered during delivery.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/mail.py#L23-L84
|
[
"def parse_priority(priority):\n if priority is None:\n priority = get_default_priority()\n # If priority is given as a string, returns the enum representation\n if isinstance(priority, string_types):\n priority = getattr(PRIORITY, priority, None)\n\n if priority is None:\n raise ValueError('Invalid priority, must be one of: %s' %\n ', '.join(PRIORITY._fields))\n return priority\n"
] |
from multiprocessing import Pool
from multiprocessing.dummy import Pool as ThreadPool
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import connection as db_connection
from django.db.models import Q
from django.template import Context, Template
from django.utils.timezone import now
from .connections import connections
from .models import Email, EmailTemplate, Log, PRIORITY, STATUS
from .settings import (get_available_backends, get_batch_size,
get_log_level, get_sending_order, get_threads_per_process)
from .utils import (get_email_template, parse_emails, parse_priority,
split_emails, create_attachments)
from .logutils import setup_loghandlers
logger = setup_loghandlers("INFO")
def send(recipients=None, sender=None, template=None, context=None, subject='',
message='', html_message='', scheduled_time=None, headers=None,
priority=None, attachments=None, render_on_delivery=False,
log_level=None, commit=True, cc=None, bcc=None, language='',
backend=''):
try:
recipients = parse_emails(recipients)
except ValidationError as e:
raise ValidationError('recipients: %s' % e.message)
try:
cc = parse_emails(cc)
except ValidationError as e:
raise ValidationError('c: %s' % e.message)
try:
bcc = parse_emails(bcc)
except ValidationError as e:
raise ValidationError('bcc: %s' % e.message)
if sender is None:
sender = settings.DEFAULT_FROM_EMAIL
priority = parse_priority(priority)
if log_level is None:
log_level = get_log_level()
if not commit:
if priority == PRIORITY.now:
raise ValueError("send_many() can't be used with priority = 'now'")
if attachments:
raise ValueError("Can't add attachments with send_many()")
if template:
if subject:
raise ValueError('You can\'t specify both "template" and "subject" arguments')
if message:
raise ValueError('You can\'t specify both "template" and "message" arguments')
if html_message:
raise ValueError('You can\'t specify both "template" and "html_message" arguments')
# template can be an EmailTemplate instance or name
if isinstance(template, EmailTemplate):
template = template
# If language is specified, ensure template uses the right language
if language:
if template.language != language:
template = template.translated_templates.get(language=language)
else:
template = get_email_template(template, language)
if backend and backend not in get_available_backends().keys():
raise ValueError('%s is not a valid backend alias' % backend)
email = create(sender, recipients, cc, bcc, subject, message, html_message,
context, scheduled_time, headers, template, priority,
render_on_delivery, commit=commit, backend=backend)
if attachments:
attachments = create_attachments(attachments)
email.attachments.add(*attachments)
if priority == PRIORITY.now:
email.dispatch(log_level=log_level)
return email
def send_many(kwargs_list):
"""
Similar to mail.send(), but this function accepts a list of kwargs.
Internally, it uses Django's bulk_create command for efficiency reasons.
Currently send_many() can't be used to send emails with priority = 'now'.
"""
emails = []
for kwargs in kwargs_list:
emails.append(send(commit=False, **kwargs))
Email.objects.bulk_create(emails)
def get_queued():
"""
Returns a list of emails that should be sent:
- Status is queued
- Has scheduled_time lower than the current time or None
"""
return Email.objects.filter(status=STATUS.queued) \
.select_related('template') \
.filter(Q(scheduled_time__lte=now()) | Q(scheduled_time=None)) \
.order_by(*get_sending_order()).prefetch_related('attachments')[:get_batch_size()]
def send_queued(processes=1, log_level=None):
"""
Sends out all queued mails that has scheduled_time less than now or None
"""
queued_emails = get_queued()
total_sent, total_failed = 0, 0
total_email = len(queued_emails)
logger.info('Started sending %s emails with %s processes.' %
(total_email, processes))
if log_level is None:
log_level = get_log_level()
if queued_emails:
# Don't use more processes than number of emails
if total_email < processes:
processes = total_email
if processes == 1:
total_sent, total_failed = _send_bulk(queued_emails,
uses_multiprocessing=False,
log_level=log_level)
else:
email_lists = split_emails(queued_emails, processes)
pool = Pool(processes)
results = pool.map(_send_bulk, email_lists)
pool.terminate()
total_sent = sum([result[0] for result in results])
total_failed = sum([result[1] for result in results])
message = '%s emails attempted, %s sent, %s failed' % (
total_email,
total_sent,
total_failed
)
logger.info(message)
return (total_sent, total_failed)
def _send_bulk(emails, uses_multiprocessing=True, log_level=None):
# Multiprocessing does not play well with database connection
# Fix: Close connections on forking process
# https://groups.google.com/forum/#!topic/django-users/eCAIY9DAfG0
if uses_multiprocessing:
db_connection.close()
if log_level is None:
log_level = get_log_level()
sent_emails = []
failed_emails = [] # This is a list of two tuples (email, exception)
email_count = len(emails)
logger.info('Process started, sending %s emails' % email_count)
def send(email):
try:
email.dispatch(log_level=log_level, commit=False,
disconnect_after_delivery=False)
sent_emails.append(email)
logger.debug('Successfully sent email #%d' % email.id)
except Exception as e:
logger.debug('Failed to send email #%d' % email.id)
failed_emails.append((email, e))
# Prepare emails before we send these to threads for sending
# So we don't need to access the DB from within threads
for email in emails:
# Sometimes this can fail, for example when trying to render
# email from a faulty Django template
try:
email.prepare_email_message()
except Exception as e:
failed_emails.append((email, e))
number_of_threads = min(get_threads_per_process(), email_count)
pool = ThreadPool(number_of_threads)
pool.map(send, emails)
pool.close()
pool.join()
connections.close()
# Update statuses of sent and failed emails
email_ids = [email.id for email in sent_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.sent)
email_ids = [email.id for (email, e) in failed_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.failed)
# If log level is 0, log nothing, 1 logs only sending failures
# and 2 means log both successes and failures
if log_level >= 1:
logs = []
for (email, exception) in failed_emails:
logs.append(
Log(email=email, status=STATUS.failed,
message=str(exception),
exception_type=type(exception).__name__)
)
if logs:
Log.objects.bulk_create(logs)
if log_level == 2:
logs = []
for email in sent_emails:
logs.append(Log(email=email, status=STATUS.sent))
if logs:
Log.objects.bulk_create(logs)
logger.info(
'Process finished, %s attempted, %s sent, %s failed' % (
email_count, len(sent_emails), len(failed_emails)
)
)
return len(sent_emails), len(failed_emails)
|
ui/django-post_office
|
post_office/mail.py
|
send_many
|
python
|
def send_many(kwargs_list):
emails = []
for kwargs in kwargs_list:
emails.append(send(commit=False, **kwargs))
Email.objects.bulk_create(emails)
|
Similar to mail.send(), but this function accepts a list of kwargs.
Internally, it uses Django's bulk_create command for efficiency reasons.
Currently send_many() can't be used to send emails with priority = 'now'.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/mail.py#L157-L166
|
[
"def send(recipients=None, sender=None, template=None, context=None, subject='',\n message='', html_message='', scheduled_time=None, headers=None,\n priority=None, attachments=None, render_on_delivery=False,\n log_level=None, commit=True, cc=None, bcc=None, language='',\n backend=''):\n\n try:\n recipients = parse_emails(recipients)\n except ValidationError as e:\n raise ValidationError('recipients: %s' % e.message)\n\n try:\n cc = parse_emails(cc)\n except ValidationError as e:\n raise ValidationError('c: %s' % e.message)\n\n try:\n bcc = parse_emails(bcc)\n except ValidationError as e:\n raise ValidationError('bcc: %s' % e.message)\n\n if sender is None:\n sender = settings.DEFAULT_FROM_EMAIL\n\n priority = parse_priority(priority)\n\n if log_level is None:\n log_level = get_log_level()\n\n if not commit:\n if priority == PRIORITY.now:\n raise ValueError(\"send_many() can't be used with priority = 'now'\")\n if attachments:\n raise ValueError(\"Can't add attachments with send_many()\")\n\n if template:\n if subject:\n raise ValueError('You can\\'t specify both \"template\" and \"subject\" arguments')\n if message:\n raise ValueError('You can\\'t specify both \"template\" and \"message\" arguments')\n if html_message:\n raise ValueError('You can\\'t specify both \"template\" and \"html_message\" arguments')\n\n # template can be an EmailTemplate instance or name\n if isinstance(template, EmailTemplate):\n template = template\n # If language is specified, ensure template uses the right language\n if language:\n if template.language != language:\n template = template.translated_templates.get(language=language)\n else:\n template = get_email_template(template, language)\n\n if backend and backend not in get_available_backends().keys():\n raise ValueError('%s is not a valid backend alias' % backend)\n\n email = create(sender, recipients, cc, bcc, subject, message, html_message,\n context, scheduled_time, headers, template, priority,\n render_on_delivery, 
commit=commit, backend=backend)\n\n if attachments:\n attachments = create_attachments(attachments)\n email.attachments.add(*attachments)\n\n if priority == PRIORITY.now:\n email.dispatch(log_level=log_level)\n\n return email\n"
] |
from multiprocessing import Pool
from multiprocessing.dummy import Pool as ThreadPool
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import connection as db_connection
from django.db.models import Q
from django.template import Context, Template
from django.utils.timezone import now
from .connections import connections
from .models import Email, EmailTemplate, Log, PRIORITY, STATUS
from .settings import (get_available_backends, get_batch_size,
get_log_level, get_sending_order, get_threads_per_process)
from .utils import (get_email_template, parse_emails, parse_priority,
split_emails, create_attachments)
from .logutils import setup_loghandlers
logger = setup_loghandlers("INFO")
def create(sender, recipients=None, cc=None, bcc=None, subject='', message='',
html_message='', context=None, scheduled_time=None, headers=None,
template=None, priority=None, render_on_delivery=False, commit=True,
backend=''):
"""
Creates an email from supplied keyword arguments. If template is
specified, email subject and content will be rendered during delivery.
"""
priority = parse_priority(priority)
status = None if priority == PRIORITY.now else STATUS.queued
if recipients is None:
recipients = []
if cc is None:
cc = []
if bcc is None:
bcc = []
if context is None:
context = ''
# If email is to be rendered during delivery, save all necessary
# information
if render_on_delivery:
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
context=context, template=template, backend_alias=backend
)
else:
if template:
subject = template.subject
message = template.content
html_message = template.html_content
_context = Context(context or {})
subject = Template(subject).render(_context)
message = Template(message).render(_context)
html_message = Template(html_message).render(_context)
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
subject=subject,
message=message,
html_message=html_message,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
backend_alias=backend
)
if commit:
email.save()
return email
def send(recipients=None, sender=None, template=None, context=None, subject='',
message='', html_message='', scheduled_time=None, headers=None,
priority=None, attachments=None, render_on_delivery=False,
log_level=None, commit=True, cc=None, bcc=None, language='',
backend=''):
try:
recipients = parse_emails(recipients)
except ValidationError as e:
raise ValidationError('recipients: %s' % e.message)
try:
cc = parse_emails(cc)
except ValidationError as e:
raise ValidationError('c: %s' % e.message)
try:
bcc = parse_emails(bcc)
except ValidationError as e:
raise ValidationError('bcc: %s' % e.message)
if sender is None:
sender = settings.DEFAULT_FROM_EMAIL
priority = parse_priority(priority)
if log_level is None:
log_level = get_log_level()
if not commit:
if priority == PRIORITY.now:
raise ValueError("send_many() can't be used with priority = 'now'")
if attachments:
raise ValueError("Can't add attachments with send_many()")
if template:
if subject:
raise ValueError('You can\'t specify both "template" and "subject" arguments')
if message:
raise ValueError('You can\'t specify both "template" and "message" arguments')
if html_message:
raise ValueError('You can\'t specify both "template" and "html_message" arguments')
# template can be an EmailTemplate instance or name
if isinstance(template, EmailTemplate):
template = template
# If language is specified, ensure template uses the right language
if language:
if template.language != language:
template = template.translated_templates.get(language=language)
else:
template = get_email_template(template, language)
if backend and backend not in get_available_backends().keys():
raise ValueError('%s is not a valid backend alias' % backend)
email = create(sender, recipients, cc, bcc, subject, message, html_message,
context, scheduled_time, headers, template, priority,
render_on_delivery, commit=commit, backend=backend)
if attachments:
attachments = create_attachments(attachments)
email.attachments.add(*attachments)
if priority == PRIORITY.now:
email.dispatch(log_level=log_level)
return email
def get_queued():
"""
Returns a list of emails that should be sent:
- Status is queued
- Has scheduled_time lower than the current time or None
"""
return Email.objects.filter(status=STATUS.queued) \
.select_related('template') \
.filter(Q(scheduled_time__lte=now()) | Q(scheduled_time=None)) \
.order_by(*get_sending_order()).prefetch_related('attachments')[:get_batch_size()]
def send_queued(processes=1, log_level=None):
"""
Sends out all queued mails that has scheduled_time less than now or None
"""
queued_emails = get_queued()
total_sent, total_failed = 0, 0
total_email = len(queued_emails)
logger.info('Started sending %s emails with %s processes.' %
(total_email, processes))
if log_level is None:
log_level = get_log_level()
if queued_emails:
# Don't use more processes than number of emails
if total_email < processes:
processes = total_email
if processes == 1:
total_sent, total_failed = _send_bulk(queued_emails,
uses_multiprocessing=False,
log_level=log_level)
else:
email_lists = split_emails(queued_emails, processes)
pool = Pool(processes)
results = pool.map(_send_bulk, email_lists)
pool.terminate()
total_sent = sum([result[0] for result in results])
total_failed = sum([result[1] for result in results])
message = '%s emails attempted, %s sent, %s failed' % (
total_email,
total_sent,
total_failed
)
logger.info(message)
return (total_sent, total_failed)
def _send_bulk(emails, uses_multiprocessing=True, log_level=None):
# Multiprocessing does not play well with database connection
# Fix: Close connections on forking process
# https://groups.google.com/forum/#!topic/django-users/eCAIY9DAfG0
if uses_multiprocessing:
db_connection.close()
if log_level is None:
log_level = get_log_level()
sent_emails = []
failed_emails = [] # This is a list of two tuples (email, exception)
email_count = len(emails)
logger.info('Process started, sending %s emails' % email_count)
def send(email):
try:
email.dispatch(log_level=log_level, commit=False,
disconnect_after_delivery=False)
sent_emails.append(email)
logger.debug('Successfully sent email #%d' % email.id)
except Exception as e:
logger.debug('Failed to send email #%d' % email.id)
failed_emails.append((email, e))
# Prepare emails before we send these to threads for sending
# So we don't need to access the DB from within threads
for email in emails:
# Sometimes this can fail, for example when trying to render
# email from a faulty Django template
try:
email.prepare_email_message()
except Exception as e:
failed_emails.append((email, e))
number_of_threads = min(get_threads_per_process(), email_count)
pool = ThreadPool(number_of_threads)
pool.map(send, emails)
pool.close()
pool.join()
connections.close()
# Update statuses of sent and failed emails
email_ids = [email.id for email in sent_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.sent)
email_ids = [email.id for (email, e) in failed_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.failed)
# If log level is 0, log nothing, 1 logs only sending failures
# and 2 means log both successes and failures
if log_level >= 1:
logs = []
for (email, exception) in failed_emails:
logs.append(
Log(email=email, status=STATUS.failed,
message=str(exception),
exception_type=type(exception).__name__)
)
if logs:
Log.objects.bulk_create(logs)
if log_level == 2:
logs = []
for email in sent_emails:
logs.append(Log(email=email, status=STATUS.sent))
if logs:
Log.objects.bulk_create(logs)
logger.info(
'Process finished, %s attempted, %s sent, %s failed' % (
email_count, len(sent_emails), len(failed_emails)
)
)
return len(sent_emails), len(failed_emails)
|
ui/django-post_office
|
post_office/mail.py
|
get_queued
|
python
|
def get_queued():
return Email.objects.filter(status=STATUS.queued) \
.select_related('template') \
.filter(Q(scheduled_time__lte=now()) | Q(scheduled_time=None)) \
.order_by(*get_sending_order()).prefetch_related('attachments')[:get_batch_size()]
|
Returns a list of emails that should be sent:
- Status is queued
- Has scheduled_time lower than the current time or None
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/mail.py#L169-L178
|
[
"def get_batch_size():\n return get_config().get('BATCH_SIZE', 100)\n",
"def get_sending_order():\n return get_config().get('SENDING_ORDER', ['-priority'])\n"
] |
from multiprocessing import Pool
from multiprocessing.dummy import Pool as ThreadPool
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import connection as db_connection
from django.db.models import Q
from django.template import Context, Template
from django.utils.timezone import now
from .connections import connections
from .models import Email, EmailTemplate, Log, PRIORITY, STATUS
from .settings import (get_available_backends, get_batch_size,
get_log_level, get_sending_order, get_threads_per_process)
from .utils import (get_email_template, parse_emails, parse_priority,
split_emails, create_attachments)
from .logutils import setup_loghandlers
logger = setup_loghandlers("INFO")
def create(sender, recipients=None, cc=None, bcc=None, subject='', message='',
html_message='', context=None, scheduled_time=None, headers=None,
template=None, priority=None, render_on_delivery=False, commit=True,
backend=''):
"""
Creates an email from supplied keyword arguments. If template is
specified, email subject and content will be rendered during delivery.
"""
priority = parse_priority(priority)
status = None if priority == PRIORITY.now else STATUS.queued
if recipients is None:
recipients = []
if cc is None:
cc = []
if bcc is None:
bcc = []
if context is None:
context = ''
# If email is to be rendered during delivery, save all necessary
# information
if render_on_delivery:
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
context=context, template=template, backend_alias=backend
)
else:
if template:
subject = template.subject
message = template.content
html_message = template.html_content
_context = Context(context or {})
subject = Template(subject).render(_context)
message = Template(message).render(_context)
html_message = Template(html_message).render(_context)
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
subject=subject,
message=message,
html_message=html_message,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
backend_alias=backend
)
if commit:
email.save()
return email
def send(recipients=None, sender=None, template=None, context=None, subject='',
message='', html_message='', scheduled_time=None, headers=None,
priority=None, attachments=None, render_on_delivery=False,
log_level=None, commit=True, cc=None, bcc=None, language='',
backend=''):
try:
recipients = parse_emails(recipients)
except ValidationError as e:
raise ValidationError('recipients: %s' % e.message)
try:
cc = parse_emails(cc)
except ValidationError as e:
raise ValidationError('c: %s' % e.message)
try:
bcc = parse_emails(bcc)
except ValidationError as e:
raise ValidationError('bcc: %s' % e.message)
if sender is None:
sender = settings.DEFAULT_FROM_EMAIL
priority = parse_priority(priority)
if log_level is None:
log_level = get_log_level()
if not commit:
if priority == PRIORITY.now:
raise ValueError("send_many() can't be used with priority = 'now'")
if attachments:
raise ValueError("Can't add attachments with send_many()")
if template:
if subject:
raise ValueError('You can\'t specify both "template" and "subject" arguments')
if message:
raise ValueError('You can\'t specify both "template" and "message" arguments')
if html_message:
raise ValueError('You can\'t specify both "template" and "html_message" arguments')
# template can be an EmailTemplate instance or name
if isinstance(template, EmailTemplate):
template = template
# If language is specified, ensure template uses the right language
if language:
if template.language != language:
template = template.translated_templates.get(language=language)
else:
template = get_email_template(template, language)
if backend and backend not in get_available_backends().keys():
raise ValueError('%s is not a valid backend alias' % backend)
email = create(sender, recipients, cc, bcc, subject, message, html_message,
context, scheduled_time, headers, template, priority,
render_on_delivery, commit=commit, backend=backend)
if attachments:
attachments = create_attachments(attachments)
email.attachments.add(*attachments)
if priority == PRIORITY.now:
email.dispatch(log_level=log_level)
return email
def send_many(kwargs_list):
"""
Similar to mail.send(), but this function accepts a list of kwargs.
Internally, it uses Django's bulk_create command for efficiency reasons.
Currently send_many() can't be used to send emails with priority = 'now'.
"""
emails = []
for kwargs in kwargs_list:
emails.append(send(commit=False, **kwargs))
Email.objects.bulk_create(emails)
def send_queued(processes=1, log_level=None):
"""
Sends out all queued mails that has scheduled_time less than now or None
"""
queued_emails = get_queued()
total_sent, total_failed = 0, 0
total_email = len(queued_emails)
logger.info('Started sending %s emails with %s processes.' %
(total_email, processes))
if log_level is None:
log_level = get_log_level()
if queued_emails:
# Don't use more processes than number of emails
if total_email < processes:
processes = total_email
if processes == 1:
total_sent, total_failed = _send_bulk(queued_emails,
uses_multiprocessing=False,
log_level=log_level)
else:
email_lists = split_emails(queued_emails, processes)
pool = Pool(processes)
results = pool.map(_send_bulk, email_lists)
pool.terminate()
total_sent = sum([result[0] for result in results])
total_failed = sum([result[1] for result in results])
message = '%s emails attempted, %s sent, %s failed' % (
total_email,
total_sent,
total_failed
)
logger.info(message)
return (total_sent, total_failed)
def _send_bulk(emails, uses_multiprocessing=True, log_level=None):
# Multiprocessing does not play well with database connection
# Fix: Close connections on forking process
# https://groups.google.com/forum/#!topic/django-users/eCAIY9DAfG0
if uses_multiprocessing:
db_connection.close()
if log_level is None:
log_level = get_log_level()
sent_emails = []
failed_emails = [] # This is a list of two tuples (email, exception)
email_count = len(emails)
logger.info('Process started, sending %s emails' % email_count)
def send(email):
try:
email.dispatch(log_level=log_level, commit=False,
disconnect_after_delivery=False)
sent_emails.append(email)
logger.debug('Successfully sent email #%d' % email.id)
except Exception as e:
logger.debug('Failed to send email #%d' % email.id)
failed_emails.append((email, e))
# Prepare emails before we send these to threads for sending
# So we don't need to access the DB from within threads
for email in emails:
# Sometimes this can fail, for example when trying to render
# email from a faulty Django template
try:
email.prepare_email_message()
except Exception as e:
failed_emails.append((email, e))
number_of_threads = min(get_threads_per_process(), email_count)
pool = ThreadPool(number_of_threads)
pool.map(send, emails)
pool.close()
pool.join()
connections.close()
# Update statuses of sent and failed emails
email_ids = [email.id for email in sent_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.sent)
email_ids = [email.id for (email, e) in failed_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.failed)
# If log level is 0, log nothing, 1 logs only sending failures
# and 2 means log both successes and failures
if log_level >= 1:
logs = []
for (email, exception) in failed_emails:
logs.append(
Log(email=email, status=STATUS.failed,
message=str(exception),
exception_type=type(exception).__name__)
)
if logs:
Log.objects.bulk_create(logs)
if log_level == 2:
logs = []
for email in sent_emails:
logs.append(Log(email=email, status=STATUS.sent))
if logs:
Log.objects.bulk_create(logs)
logger.info(
'Process finished, %s attempted, %s sent, %s failed' % (
email_count, len(sent_emails), len(failed_emails)
)
)
return len(sent_emails), len(failed_emails)
|
ui/django-post_office
|
post_office/mail.py
|
send_queued
|
python
|
def send_queued(processes=1, log_level=None):
queued_emails = get_queued()
total_sent, total_failed = 0, 0
total_email = len(queued_emails)
logger.info('Started sending %s emails with %s processes.' %
(total_email, processes))
if log_level is None:
log_level = get_log_level()
if queued_emails:
# Don't use more processes than number of emails
if total_email < processes:
processes = total_email
if processes == 1:
total_sent, total_failed = _send_bulk(queued_emails,
uses_multiprocessing=False,
log_level=log_level)
else:
email_lists = split_emails(queued_emails, processes)
pool = Pool(processes)
results = pool.map(_send_bulk, email_lists)
pool.terminate()
total_sent = sum([result[0] for result in results])
total_failed = sum([result[1] for result in results])
message = '%s emails attempted, %s sent, %s failed' % (
total_email,
total_sent,
total_failed
)
logger.info(message)
return (total_sent, total_failed)
|
Sends out all queued mails that has scheduled_time less than now or None
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/mail.py#L181-L220
|
[
"def get_log_level():\n return get_config().get('LOG_LEVEL', 2)\n",
"def split_emails(emails, split_count=1):\n # Group emails into X sublists\n # taken from http://www.garyrobinson.net/2008/04/splitting-a-pyt.html\n # Strange bug, only return 100 email if we do not evaluate the list\n if list(emails):\n return [emails[i::split_count] for i in range(split_count)]\n",
"def get_queued():\n \"\"\"\n Returns a list of emails that should be sent:\n - Status is queued\n - Has scheduled_time lower than the current time or None\n \"\"\"\n return Email.objects.filter(status=STATUS.queued) \\\n .select_related('template') \\\n .filter(Q(scheduled_time__lte=now()) | Q(scheduled_time=None)) \\\n .order_by(*get_sending_order()).prefetch_related('attachments')[:get_batch_size()]\n",
"def _send_bulk(emails, uses_multiprocessing=True, log_level=None):\n # Multiprocessing does not play well with database connection\n # Fix: Close connections on forking process\n # https://groups.google.com/forum/#!topic/django-users/eCAIY9DAfG0\n if uses_multiprocessing:\n db_connection.close()\n\n if log_level is None:\n log_level = get_log_level()\n\n sent_emails = []\n failed_emails = [] # This is a list of two tuples (email, exception)\n email_count = len(emails)\n\n logger.info('Process started, sending %s emails' % email_count)\n\n def send(email):\n try:\n email.dispatch(log_level=log_level, commit=False,\n disconnect_after_delivery=False)\n sent_emails.append(email)\n logger.debug('Successfully sent email #%d' % email.id)\n except Exception as e:\n logger.debug('Failed to send email #%d' % email.id)\n failed_emails.append((email, e))\n\n # Prepare emails before we send these to threads for sending\n # So we don't need to access the DB from within threads\n for email in emails:\n # Sometimes this can fail, for example when trying to render\n # email from a faulty Django template\n try:\n email.prepare_email_message()\n except Exception as e:\n failed_emails.append((email, e))\n\n number_of_threads = min(get_threads_per_process(), email_count)\n pool = ThreadPool(number_of_threads)\n\n pool.map(send, emails)\n pool.close()\n pool.join()\n\n connections.close()\n\n # Update statuses of sent and failed emails\n email_ids = [email.id for email in sent_emails]\n Email.objects.filter(id__in=email_ids).update(status=STATUS.sent)\n\n email_ids = [email.id for (email, e) in failed_emails]\n Email.objects.filter(id__in=email_ids).update(status=STATUS.failed)\n\n # If log level is 0, log nothing, 1 logs only sending failures\n # and 2 means log both successes and failures\n if log_level >= 1:\n\n logs = []\n for (email, exception) in failed_emails:\n logs.append(\n Log(email=email, status=STATUS.failed,\n message=str(exception),\n 
exception_type=type(exception).__name__)\n )\n\n if logs:\n Log.objects.bulk_create(logs)\n\n if log_level == 2:\n\n logs = []\n for email in sent_emails:\n logs.append(Log(email=email, status=STATUS.sent))\n\n if logs:\n Log.objects.bulk_create(logs)\n\n logger.info(\n 'Process finished, %s attempted, %s sent, %s failed' % (\n email_count, len(sent_emails), len(failed_emails)\n )\n )\n\n return len(sent_emails), len(failed_emails)\n"
] |
from multiprocessing import Pool
from multiprocessing.dummy import Pool as ThreadPool
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import connection as db_connection
from django.db.models import Q
from django.template import Context, Template
from django.utils.timezone import now
from .connections import connections
from .models import Email, EmailTemplate, Log, PRIORITY, STATUS
from .settings import (get_available_backends, get_batch_size,
get_log_level, get_sending_order, get_threads_per_process)
from .utils import (get_email_template, parse_emails, parse_priority,
split_emails, create_attachments)
from .logutils import setup_loghandlers
logger = setup_loghandlers("INFO")
def create(sender, recipients=None, cc=None, bcc=None, subject='', message='',
html_message='', context=None, scheduled_time=None, headers=None,
template=None, priority=None, render_on_delivery=False, commit=True,
backend=''):
"""
Creates an email from supplied keyword arguments. If template is
specified, email subject and content will be rendered during delivery.
"""
priority = parse_priority(priority)
status = None if priority == PRIORITY.now else STATUS.queued
if recipients is None:
recipients = []
if cc is None:
cc = []
if bcc is None:
bcc = []
if context is None:
context = ''
# If email is to be rendered during delivery, save all necessary
# information
if render_on_delivery:
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
context=context, template=template, backend_alias=backend
)
else:
if template:
subject = template.subject
message = template.content
html_message = template.html_content
_context = Context(context or {})
subject = Template(subject).render(_context)
message = Template(message).render(_context)
html_message = Template(html_message).render(_context)
email = Email(
from_email=sender,
to=recipients,
cc=cc,
bcc=bcc,
subject=subject,
message=message,
html_message=html_message,
scheduled_time=scheduled_time,
headers=headers, priority=priority, status=status,
backend_alias=backend
)
if commit:
email.save()
return email
def send(recipients=None, sender=None, template=None, context=None, subject='',
message='', html_message='', scheduled_time=None, headers=None,
priority=None, attachments=None, render_on_delivery=False,
log_level=None, commit=True, cc=None, bcc=None, language='',
backend=''):
try:
recipients = parse_emails(recipients)
except ValidationError as e:
raise ValidationError('recipients: %s' % e.message)
try:
cc = parse_emails(cc)
except ValidationError as e:
raise ValidationError('c: %s' % e.message)
try:
bcc = parse_emails(bcc)
except ValidationError as e:
raise ValidationError('bcc: %s' % e.message)
if sender is None:
sender = settings.DEFAULT_FROM_EMAIL
priority = parse_priority(priority)
if log_level is None:
log_level = get_log_level()
if not commit:
if priority == PRIORITY.now:
raise ValueError("send_many() can't be used with priority = 'now'")
if attachments:
raise ValueError("Can't add attachments with send_many()")
if template:
if subject:
raise ValueError('You can\'t specify both "template" and "subject" arguments')
if message:
raise ValueError('You can\'t specify both "template" and "message" arguments')
if html_message:
raise ValueError('You can\'t specify both "template" and "html_message" arguments')
# template can be an EmailTemplate instance or name
if isinstance(template, EmailTemplate):
template = template
# If language is specified, ensure template uses the right language
if language:
if template.language != language:
template = template.translated_templates.get(language=language)
else:
template = get_email_template(template, language)
if backend and backend not in get_available_backends().keys():
raise ValueError('%s is not a valid backend alias' % backend)
email = create(sender, recipients, cc, bcc, subject, message, html_message,
context, scheduled_time, headers, template, priority,
render_on_delivery, commit=commit, backend=backend)
if attachments:
attachments = create_attachments(attachments)
email.attachments.add(*attachments)
if priority == PRIORITY.now:
email.dispatch(log_level=log_level)
return email
def send_many(kwargs_list):
"""
Similar to mail.send(), but this function accepts a list of kwargs.
Internally, it uses Django's bulk_create command for efficiency reasons.
Currently send_many() can't be used to send emails with priority = 'now'.
"""
emails = []
for kwargs in kwargs_list:
emails.append(send(commit=False, **kwargs))
Email.objects.bulk_create(emails)
def get_queued():
"""
Returns a list of emails that should be sent:
- Status is queued
- Has scheduled_time lower than the current time or None
"""
return Email.objects.filter(status=STATUS.queued) \
.select_related('template') \
.filter(Q(scheduled_time__lte=now()) | Q(scheduled_time=None)) \
.order_by(*get_sending_order()).prefetch_related('attachments')[:get_batch_size()]
def _send_bulk(emails, uses_multiprocessing=True, log_level=None):
# Multiprocessing does not play well with database connection
# Fix: Close connections on forking process
# https://groups.google.com/forum/#!topic/django-users/eCAIY9DAfG0
if uses_multiprocessing:
db_connection.close()
if log_level is None:
log_level = get_log_level()
sent_emails = []
failed_emails = [] # This is a list of two tuples (email, exception)
email_count = len(emails)
logger.info('Process started, sending %s emails' % email_count)
def send(email):
try:
email.dispatch(log_level=log_level, commit=False,
disconnect_after_delivery=False)
sent_emails.append(email)
logger.debug('Successfully sent email #%d' % email.id)
except Exception as e:
logger.debug('Failed to send email #%d' % email.id)
failed_emails.append((email, e))
# Prepare emails before we send these to threads for sending
# So we don't need to access the DB from within threads
for email in emails:
# Sometimes this can fail, for example when trying to render
# email from a faulty Django template
try:
email.prepare_email_message()
except Exception as e:
failed_emails.append((email, e))
number_of_threads = min(get_threads_per_process(), email_count)
pool = ThreadPool(number_of_threads)
pool.map(send, emails)
pool.close()
pool.join()
connections.close()
# Update statuses of sent and failed emails
email_ids = [email.id for email in sent_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.sent)
email_ids = [email.id for (email, e) in failed_emails]
Email.objects.filter(id__in=email_ids).update(status=STATUS.failed)
# If log level is 0, log nothing, 1 logs only sending failures
# and 2 means log both successes and failures
if log_level >= 1:
logs = []
for (email, exception) in failed_emails:
logs.append(
Log(email=email, status=STATUS.failed,
message=str(exception),
exception_type=type(exception).__name__)
)
if logs:
Log.objects.bulk_create(logs)
if log_level == 2:
logs = []
for email in sent_emails:
logs.append(Log(email=email, status=STATUS.sent))
if logs:
Log.objects.bulk_create(logs)
logger.info(
'Process finished, %s attempted, %s sent, %s failed' % (
email_count, len(sent_emails), len(failed_emails)
)
)
return len(sent_emails), len(failed_emails)
|
ui/django-post_office
|
post_office/backends.py
|
EmailBackend.send_messages
|
python
|
def send_messages(self, email_messages):
from .mail import create
from .utils import create_attachments
if not email_messages:
return
for email_message in email_messages:
subject = email_message.subject
from_email = email_message.from_email
message = email_message.body
headers = email_message.extra_headers
# Check whether email has 'text/html' alternative
alternatives = getattr(email_message, 'alternatives', ())
for alternative in alternatives:
if alternative[1].startswith('text/html'):
html_message = alternative[0]
break
else:
html_message = ''
attachment_files = {}
for attachment in email_message.attachments:
if isinstance(attachment, MIMEBase):
attachment_files[attachment.get_filename()] = {
'file': ContentFile(attachment.get_payload()),
'mimetype': attachment.get_content_type(),
'headers': OrderedDict(attachment.items()),
}
else:
attachment_files[attachment[0]] = ContentFile(attachment[1])
email = create(sender=from_email,
recipients=email_message.to, cc=email_message.cc,
bcc=email_message.bcc, subject=subject,
message=message, html_message=html_message,
headers=headers)
if attachment_files:
attachments = create_attachments(attachment_files)
email.attachments.add(*attachments)
if get_default_priority() == 'now':
email.dispatch()
|
Queue one or more EmailMessage objects and returns the number of
email messages sent.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/backends.py#L17-L66
|
[
"def get_default_priority():\n return get_config().get('DEFAULT_PRIORITY', 'medium')\n",
"def create(sender, recipients=None, cc=None, bcc=None, subject='', message='',\n html_message='', context=None, scheduled_time=None, headers=None,\n template=None, priority=None, render_on_delivery=False, commit=True,\n backend=''):\n \"\"\"\n Creates an email from supplied keyword arguments. If template is\n specified, email subject and content will be rendered during delivery.\n \"\"\"\n priority = parse_priority(priority)\n status = None if priority == PRIORITY.now else STATUS.queued\n\n if recipients is None:\n recipients = []\n if cc is None:\n cc = []\n if bcc is None:\n bcc = []\n if context is None:\n context = ''\n\n # If email is to be rendered during delivery, save all necessary\n # information\n if render_on_delivery:\n email = Email(\n from_email=sender,\n to=recipients,\n cc=cc,\n bcc=bcc,\n scheduled_time=scheduled_time,\n headers=headers, priority=priority, status=status,\n context=context, template=template, backend_alias=backend\n )\n\n else:\n\n if template:\n subject = template.subject\n message = template.content\n html_message = template.html_content\n\n _context = Context(context or {})\n subject = Template(subject).render(_context)\n message = Template(message).render(_context)\n html_message = Template(html_message).render(_context)\n\n email = Email(\n from_email=sender,\n to=recipients,\n cc=cc,\n bcc=bcc,\n subject=subject,\n message=message,\n html_message=html_message,\n scheduled_time=scheduled_time,\n headers=headers, priority=priority, status=status,\n backend_alias=backend\n )\n\n if commit:\n email.save()\n\n return email\n",
"def create_attachments(attachment_files):\n \"\"\"\n Create Attachment instances from files\n\n attachment_files is a dict of:\n * Key - the filename to be used for the attachment.\n * Value - file-like object, or a filename to open OR a dict of {'file': file-like-object, 'mimetype': string}\n\n Returns a list of Attachment objects\n \"\"\"\n attachments = []\n for filename, filedata in attachment_files.items():\n\n if isinstance(filedata, dict):\n content = filedata.get('file', None)\n mimetype = filedata.get('mimetype', None)\n headers = filedata.get('headers', None)\n else:\n content = filedata\n mimetype = None\n headers = None\n\n opened_file = None\n\n if isinstance(content, string_types):\n # `content` is a filename - try to open the file\n opened_file = open(content, 'rb')\n content = File(opened_file)\n\n attachment = Attachment()\n if mimetype:\n attachment.mimetype = mimetype\n attachment.headers = headers\n attachment.file.save(filename, content=content, save=True)\n\n attachments.append(attachment)\n\n if opened_file is not None:\n opened_file.close()\n\n return attachments\n"
] |
class EmailBackend(BaseEmailBackend):
def open(self):
pass
def close(self):
pass
|
ui/django-post_office
|
post_office/lockfile.py
|
FileLock.valid_lock
|
python
|
def valid_lock(self):
lock_pid = self.get_lock_pid()
# If we're unable to get lock_pid
if lock_pid is None:
return False
# this is our process
if self._pid == lock_pid:
return True
# it is/was another process
# see if it is running
try:
os.kill(lock_pid, 0)
except OSError:
self.release()
return False
# it is running
return True
|
See if the lock exists and is left over from an old process.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/lockfile.py#L56-L80
|
[
"def get_lock_pid(self):\n try:\n return int(open(self.lock_filename).read())\n except IOError:\n # If we can't read symbolic link, there are two possibilities:\n # 1. The symbolic link is dead (point to non existing file)\n # 2. Symbolic link is not there\n # In either case, we can safely release the lock\n self.release()\n except ValueError:\n # most likely an empty or otherwise invalid lock file\n self.release()\n",
"def release(self):\n \"\"\"Try to delete the lock files. Doesn't matter if we fail\"\"\"\n if self.lock_filename != self.pid_filename:\n try:\n os.unlink(self.lock_filename)\n except OSError:\n pass\n\n try:\n os.remove(self.pid_filename)\n except OSError:\n pass\n"
] |
class FileLock(object):
def __init__(self, lock_filename, timeout=None, force=False):
self.lock_filename = '%s.lock' % lock_filename
self.timeout = timeout
self.force = force
self._pid = str(os.getpid())
# Store pid in a file in the same directory as desired lockname
self.pid_filename = os.path.join(
os.path.dirname(self.lock_filename),
self._pid,
) + '.lock'
def get_lock_pid(self):
try:
return int(open(self.lock_filename).read())
except IOError:
# If we can't read symbolic link, there are two possibilities:
# 1. The symbolic link is dead (point to non existing file)
# 2. Symbolic link is not there
# In either case, we can safely release the lock
self.release()
except ValueError:
# most likely an empty or otherwise invalid lock file
self.release()
def is_locked(self, force=False):
# We aren't locked
if not self.valid_lock():
return False
# We are locked, but we want to force it without waiting
if not self.timeout:
if self.force:
self.release()
return False
else:
# We're not waiting or forcing the lock
raise FileLocked()
# Locked, but want to wait for an unlock
interval = .1
intervals = int(self.timeout / interval)
while intervals:
if self.valid_lock():
intervals -= 1
time.sleep(interval)
#print('stopping %s' % intervals)
else:
return True
# check one last time
if self.valid_lock():
if self.force:
self.release()
else:
# still locked :(
raise FileLocked()
def acquire(self):
"""Create a pid filename and create a symlink (the actual lock file)
across platforms that points to it. Symlink is used because it's an
atomic operation across platforms.
"""
pid_file = os.open(self.pid_filename, os.O_CREAT | os.O_EXCL | os.O_RDWR)
os.write(pid_file, str(os.getpid()).encode('utf-8'))
os.close(pid_file)
if hasattr(os, 'symlink') and platform.system() != 'Windows':
os.symlink(self.pid_filename, self.lock_filename)
else:
# Windows platforms doesn't support symlinks, at least not through the os API
self.lock_filename = self.pid_filename
def release(self):
"""Try to delete the lock files. Doesn't matter if we fail"""
if self.lock_filename != self.pid_filename:
try:
os.unlink(self.lock_filename)
except OSError:
pass
try:
os.remove(self.pid_filename)
except OSError:
pass
def __enter__(self):
if not self.is_locked():
self.acquire()
return self
def __exit__(self, type, value, traceback):
self.release()
|
ui/django-post_office
|
post_office/lockfile.py
|
FileLock.acquire
|
python
|
def acquire(self):
pid_file = os.open(self.pid_filename, os.O_CREAT | os.O_EXCL | os.O_RDWR)
os.write(pid_file, str(os.getpid()).encode('utf-8'))
os.close(pid_file)
if hasattr(os, 'symlink') and platform.system() != 'Windows':
os.symlink(self.pid_filename, self.lock_filename)
else:
# Windows platforms doesn't support symlinks, at least not through the os API
self.lock_filename = self.pid_filename
|
Create a pid filename and create a symlink (the actual lock file)
across platforms that points to it. Symlink is used because it's an
atomic operation across platforms.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/lockfile.py#L116-L130
| null |
class FileLock(object):
def __init__(self, lock_filename, timeout=None, force=False):
self.lock_filename = '%s.lock' % lock_filename
self.timeout = timeout
self.force = force
self._pid = str(os.getpid())
# Store pid in a file in the same directory as desired lockname
self.pid_filename = os.path.join(
os.path.dirname(self.lock_filename),
self._pid,
) + '.lock'
def get_lock_pid(self):
try:
return int(open(self.lock_filename).read())
except IOError:
# If we can't read symbolic link, there are two possibilities:
# 1. The symbolic link is dead (point to non existing file)
# 2. Symbolic link is not there
# In either case, we can safely release the lock
self.release()
except ValueError:
# most likely an empty or otherwise invalid lock file
self.release()
def valid_lock(self):
"""
See if the lock exists and is left over from an old process.
"""
lock_pid = self.get_lock_pid()
# If we're unable to get lock_pid
if lock_pid is None:
return False
# this is our process
if self._pid == lock_pid:
return True
# it is/was another process
# see if it is running
try:
os.kill(lock_pid, 0)
except OSError:
self.release()
return False
# it is running
return True
def is_locked(self, force=False):
# We aren't locked
if not self.valid_lock():
return False
# We are locked, but we want to force it without waiting
if not self.timeout:
if self.force:
self.release()
return False
else:
# We're not waiting or forcing the lock
raise FileLocked()
# Locked, but want to wait for an unlock
interval = .1
intervals = int(self.timeout / interval)
while intervals:
if self.valid_lock():
intervals -= 1
time.sleep(interval)
#print('stopping %s' % intervals)
else:
return True
# check one last time
if self.valid_lock():
if self.force:
self.release()
else:
# still locked :(
raise FileLocked()
def release(self):
"""Try to delete the lock files. Doesn't matter if we fail"""
if self.lock_filename != self.pid_filename:
try:
os.unlink(self.lock_filename)
except OSError:
pass
try:
os.remove(self.pid_filename)
except OSError:
pass
def __enter__(self):
if not self.is_locked():
self.acquire()
return self
def __exit__(self, type, value, traceback):
self.release()
|
ui/django-post_office
|
post_office/lockfile.py
|
FileLock.release
|
python
|
def release(self):
if self.lock_filename != self.pid_filename:
try:
os.unlink(self.lock_filename)
except OSError:
pass
try:
os.remove(self.pid_filename)
except OSError:
pass
|
Try to delete the lock files. Doesn't matter if we fail
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/lockfile.py#L133-L144
| null |
class FileLock(object):
def __init__(self, lock_filename, timeout=None, force=False):
self.lock_filename = '%s.lock' % lock_filename
self.timeout = timeout
self.force = force
self._pid = str(os.getpid())
# Store pid in a file in the same directory as desired lockname
self.pid_filename = os.path.join(
os.path.dirname(self.lock_filename),
self._pid,
) + '.lock'
def get_lock_pid(self):
try:
return int(open(self.lock_filename).read())
except IOError:
# If we can't read symbolic link, there are two possibilities:
# 1. The symbolic link is dead (point to non existing file)
# 2. Symbolic link is not there
# In either case, we can safely release the lock
self.release()
except ValueError:
# most likely an empty or otherwise invalid lock file
self.release()
def valid_lock(self):
"""
See if the lock exists and is left over from an old process.
"""
lock_pid = self.get_lock_pid()
# If we're unable to get lock_pid
if lock_pid is None:
return False
# this is our process
if self._pid == lock_pid:
return True
# it is/was another process
# see if it is running
try:
os.kill(lock_pid, 0)
except OSError:
self.release()
return False
# it is running
return True
def is_locked(self, force=False):
# We aren't locked
if not self.valid_lock():
return False
# We are locked, but we want to force it without waiting
if not self.timeout:
if self.force:
self.release()
return False
else:
# We're not waiting or forcing the lock
raise FileLocked()
# Locked, but want to wait for an unlock
interval = .1
intervals = int(self.timeout / interval)
while intervals:
if self.valid_lock():
intervals -= 1
time.sleep(interval)
#print('stopping %s' % intervals)
else:
return True
# check one last time
if self.valid_lock():
if self.force:
self.release()
else:
# still locked :(
raise FileLocked()
def acquire(self):
"""Create a pid filename and create a symlink (the actual lock file)
across platforms that points to it. Symlink is used because it's an
atomic operation across platforms.
"""
pid_file = os.open(self.pid_filename, os.O_CREAT | os.O_EXCL | os.O_RDWR)
os.write(pid_file, str(os.getpid()).encode('utf-8'))
os.close(pid_file)
if hasattr(os, 'symlink') and platform.system() != 'Windows':
os.symlink(self.pid_filename, self.lock_filename)
else:
# Windows platforms doesn't support symlinks, at least not through the os API
self.lock_filename = self.pid_filename
def __enter__(self):
if not self.is_locked():
self.acquire()
return self
def __exit__(self, type, value, traceback):
self.release()
|
ui/django-post_office
|
post_office/validators.py
|
validate_email_with_name
|
python
|
def validate_email_with_name(value):
value = force_text(value)
recipient = value
if '<' and '>' in value:
start = value.find('<') + 1
end = value.find('>')
if start < end:
recipient = value[start:end]
validate_email(recipient)
|
Validate email address.
Both "Recipient Name <email@example.com>" and "email@example.com" are valid.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/validators.py#L9-L24
| null |
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.template import Template, TemplateSyntaxError, TemplateDoesNotExist
from django.utils.encoding import force_text
from .compat import text_type
def validate_comma_separated_emails(value):
"""
Validate every email address in a comma separated list of emails.
"""
if not isinstance(value, (tuple, list)):
raise ValidationError('Email list must be a list/tuple.')
for email in value:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('Invalid email: %s' % email, code='invalid')
def validate_template_syntax(source):
"""
Basic Django Template syntax validation. This allows for robuster template
authoring.
"""
try:
Template(source)
except (TemplateSyntaxError, TemplateDoesNotExist) as err:
raise ValidationError(text_type(err))
|
ui/django-post_office
|
post_office/validators.py
|
validate_comma_separated_emails
|
python
|
def validate_comma_separated_emails(value):
if not isinstance(value, (tuple, list)):
raise ValidationError('Email list must be a list/tuple.')
for email in value:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('Invalid email: %s' % email, code='invalid')
|
Validate every email address in a comma separated list of emails.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/validators.py#L27-L38
|
[
"def validate_email_with_name(value):\n \"\"\"\n Validate email address.\n\n Both \"Recipient Name <email@example.com>\" and \"email@example.com\" are valid.\n \"\"\"\n value = force_text(value)\n\n recipient = value\n if '<' and '>' in value:\n start = value.find('<') + 1\n end = value.find('>')\n if start < end:\n recipient = value[start:end]\n\n validate_email(recipient)\n"
] |
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.template import Template, TemplateSyntaxError, TemplateDoesNotExist
from django.utils.encoding import force_text
from .compat import text_type
def validate_email_with_name(value):
"""
Validate email address.
Both "Recipient Name <email@example.com>" and "email@example.com" are valid.
"""
value = force_text(value)
recipient = value
if '<' and '>' in value:
start = value.find('<') + 1
end = value.find('>')
if start < end:
recipient = value[start:end]
validate_email(recipient)
def validate_template_syntax(source):
"""
Basic Django Template syntax validation. This allows for robuster template
authoring.
"""
try:
Template(source)
except (TemplateSyntaxError, TemplateDoesNotExist) as err:
raise ValidationError(text_type(err))
|
ui/django-post_office
|
post_office/validators.py
|
validate_template_syntax
|
python
|
def validate_template_syntax(source):
try:
Template(source)
except (TemplateSyntaxError, TemplateDoesNotExist) as err:
raise ValidationError(text_type(err))
|
Basic Django Template syntax validation. This allows for robuster template
authoring.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/validators.py#L41-L49
| null |
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.template import Template, TemplateSyntaxError, TemplateDoesNotExist
from django.utils.encoding import force_text
from .compat import text_type
def validate_email_with_name(value):
"""
Validate email address.
Both "Recipient Name <email@example.com>" and "email@example.com" are valid.
"""
value = force_text(value)
recipient = value
if '<' and '>' in value:
start = value.find('<') + 1
end = value.find('>')
if start < end:
recipient = value[start:end]
validate_email(recipient)
def validate_comma_separated_emails(value):
"""
Validate every email address in a comma separated list of emails.
"""
if not isinstance(value, (tuple, list)):
raise ValidationError('Email list must be a list/tuple.')
for email in value:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('Invalid email: %s' % email, code='invalid')
|
ui/django-post_office
|
post_office/settings.py
|
get_available_backends
|
python
|
def get_available_backends():
backends = get_config().get('BACKENDS', {})
if backends:
return backends
# Try to get backend settings from old style
# POST_OFFICE = {
# 'EMAIL_BACKEND': 'mybackend'
# }
backend = get_config().get('EMAIL_BACKEND')
if backend:
warnings.warn('Please use the new POST_OFFICE["BACKENDS"] settings',
DeprecationWarning)
backends['default'] = backend
return backends
# Fall back to Django's EMAIL_BACKEND definition
backends['default'] = getattr(
settings, 'EMAIL_BACKEND',
'django.core.mail.backends.smtp.EmailBackend')
# If EMAIL_BACKEND is set to use PostOfficeBackend
# and POST_OFFICE_BACKEND is not set, fall back to SMTP
if 'post_office.EmailBackend' in backends['default']:
backends['default'] = 'django.core.mail.backends.smtp.EmailBackend'
return backends
|
Returns a dictionary of defined backend classes. For example:
{
'default': 'django.core.mail.backends.smtp.EmailBackend',
'locmem': 'django.core.mail.backends.locmem.EmailBackend',
}
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/settings.py#L14-L48
|
[
"def get_config():\n \"\"\"\n Returns Post Office's configuration in dictionary format. e.g:\n POST_OFFICE = {\n 'BATCH_SIZE': 1000\n }\n \"\"\"\n return getattr(settings, 'POST_OFFICE', {})\n"
] |
import warnings
from django.conf import settings
from django.core.cache.backends.base import InvalidCacheBackendError
from django.template import engines as template_engines
from .compat import import_attribute, get_cache
def get_backend(alias='default'):
return get_available_backends()[alias]
def get_cache_backend():
if hasattr(settings, 'CACHES'):
if "post_office" in settings.CACHES:
return get_cache("post_office")
else:
# Sometimes this raises InvalidCacheBackendError, which is ok too
try:
return get_cache("default")
except InvalidCacheBackendError:
pass
return None
def get_config():
"""
Returns Post Office's configuration in dictionary format. e.g:
POST_OFFICE = {
'BATCH_SIZE': 1000
}
"""
return getattr(settings, 'POST_OFFICE', {})
def get_batch_size():
return get_config().get('BATCH_SIZE', 100)
def get_threads_per_process():
return get_config().get('THREADS_PER_PROCESS', 5)
def get_default_priority():
return get_config().get('DEFAULT_PRIORITY', 'medium')
def get_log_level():
return get_config().get('LOG_LEVEL', 2)
def get_sending_order():
return get_config().get('SENDING_ORDER', ['-priority'])
def get_template_engine():
using = get_config().get('TEMPLATE_ENGINE', 'django')
return template_engines[using]
CONTEXT_FIELD_CLASS = get_config().get('CONTEXT_FIELD_CLASS',
'jsonfield.JSONField')
context_field_class = import_attribute(CONTEXT_FIELD_CLASS)
|
ui/django-post_office
|
post_office/utils.py
|
send_mail
|
python
|
def send_mail(subject, message, from_email, recipient_list, html_message='',
scheduled_time=None, headers=None, priority=PRIORITY.medium):
subject = force_text(subject)
status = None if priority == PRIORITY.now else STATUS.queued
emails = []
for address in recipient_list:
emails.append(
Email.objects.create(
from_email=from_email, to=address, subject=subject,
message=message, html_message=html_message, status=status,
headers=headers, priority=priority, scheduled_time=scheduled_time
)
)
if priority == PRIORITY.now:
for email in emails:
email.dispatch()
return emails
|
Add a new message to the mail queue. This is a replacement for Django's
``send_mail`` core email method.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/utils.py#L13-L34
| null |
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files import File
from django.utils.encoding import force_text
from post_office import cache
from .compat import string_types
from .models import Email, PRIORITY, STATUS, EmailTemplate, Attachment
from .settings import get_default_priority
from .validators import validate_email_with_name
def get_email_template(name, language=''):
"""
Function that returns an email template instance, from cache or DB.
"""
use_cache = getattr(settings, 'POST_OFFICE_CACHE', True)
if use_cache:
use_cache = getattr(settings, 'POST_OFFICE_TEMPLATE_CACHE', True)
if not use_cache:
return EmailTemplate.objects.get(name=name, language=language)
else:
composite_name = '%s:%s' % (name, language)
email_template = cache.get(composite_name)
if email_template is not None:
return email_template
else:
email_template = EmailTemplate.objects.get(name=name,
language=language)
cache.set(composite_name, email_template)
return email_template
def split_emails(emails, split_count=1):
# Group emails into X sublists
# taken from http://www.garyrobinson.net/2008/04/splitting-a-pyt.html
# Strange bug, only return 100 email if we do not evaluate the list
if list(emails):
return [emails[i::split_count] for i in range(split_count)]
def create_attachments(attachment_files):
"""
Create Attachment instances from files
attachment_files is a dict of:
* Key - the filename to be used for the attachment.
* Value - file-like object, or a filename to open OR a dict of {'file': file-like-object, 'mimetype': string}
Returns a list of Attachment objects
"""
attachments = []
for filename, filedata in attachment_files.items():
if isinstance(filedata, dict):
content = filedata.get('file', None)
mimetype = filedata.get('mimetype', None)
headers = filedata.get('headers', None)
else:
content = filedata
mimetype = None
headers = None
opened_file = None
if isinstance(content, string_types):
# `content` is a filename - try to open the file
opened_file = open(content, 'rb')
content = File(opened_file)
attachment = Attachment()
if mimetype:
attachment.mimetype = mimetype
attachment.headers = headers
attachment.file.save(filename, content=content, save=True)
attachments.append(attachment)
if opened_file is not None:
opened_file.close()
return attachments
def parse_priority(priority):
if priority is None:
priority = get_default_priority()
# If priority is given as a string, returns the enum representation
if isinstance(priority, string_types):
priority = getattr(PRIORITY, priority, None)
if priority is None:
raise ValueError('Invalid priority, must be one of: %s' %
', '.join(PRIORITY._fields))
return priority
def parse_emails(emails):
"""
A function that returns a list of valid email addresses.
This function will also convert a single email address into
a list of email addresses.
None value is also converted into an empty list.
"""
if isinstance(emails, string_types):
emails = [emails]
elif emails is None:
emails = []
for email in emails:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('%s is not a valid email address' % email)
return emails
|
ui/django-post_office
|
post_office/utils.py
|
get_email_template
|
python
|
def get_email_template(name, language=''):
use_cache = getattr(settings, 'POST_OFFICE_CACHE', True)
if use_cache:
use_cache = getattr(settings, 'POST_OFFICE_TEMPLATE_CACHE', True)
if not use_cache:
return EmailTemplate.objects.get(name=name, language=language)
else:
composite_name = '%s:%s' % (name, language)
email_template = cache.get(composite_name)
if email_template is not None:
return email_template
else:
email_template = EmailTemplate.objects.get(name=name,
language=language)
cache.set(composite_name, email_template)
return email_template
|
Function that returns an email template instance, from cache or DB.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/utils.py#L37-L55
|
[
"def get(name):\n return cache_backend.get(get_cache_key(name))\n",
"def set(name, content):\n return cache_backend.set(get_cache_key(name), content)\n"
] |
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files import File
from django.utils.encoding import force_text
from post_office import cache
from .compat import string_types
from .models import Email, PRIORITY, STATUS, EmailTemplate, Attachment
from .settings import get_default_priority
from .validators import validate_email_with_name
def send_mail(subject, message, from_email, recipient_list, html_message='',
scheduled_time=None, headers=None, priority=PRIORITY.medium):
"""
Add a new message to the mail queue. This is a replacement for Django's
``send_mail`` core email method.
"""
subject = force_text(subject)
status = None if priority == PRIORITY.now else STATUS.queued
emails = []
for address in recipient_list:
emails.append(
Email.objects.create(
from_email=from_email, to=address, subject=subject,
message=message, html_message=html_message, status=status,
headers=headers, priority=priority, scheduled_time=scheduled_time
)
)
if priority == PRIORITY.now:
for email in emails:
email.dispatch()
return emails
def split_emails(emails, split_count=1):
# Group emails into X sublists
# taken from http://www.garyrobinson.net/2008/04/splitting-a-pyt.html
# Strange bug, only return 100 email if we do not evaluate the list
if list(emails):
return [emails[i::split_count] for i in range(split_count)]
def create_attachments(attachment_files):
"""
Create Attachment instances from files
attachment_files is a dict of:
* Key - the filename to be used for the attachment.
* Value - file-like object, or a filename to open OR a dict of {'file': file-like-object, 'mimetype': string}
Returns a list of Attachment objects
"""
attachments = []
for filename, filedata in attachment_files.items():
if isinstance(filedata, dict):
content = filedata.get('file', None)
mimetype = filedata.get('mimetype', None)
headers = filedata.get('headers', None)
else:
content = filedata
mimetype = None
headers = None
opened_file = None
if isinstance(content, string_types):
# `content` is a filename - try to open the file
opened_file = open(content, 'rb')
content = File(opened_file)
attachment = Attachment()
if mimetype:
attachment.mimetype = mimetype
attachment.headers = headers
attachment.file.save(filename, content=content, save=True)
attachments.append(attachment)
if opened_file is not None:
opened_file.close()
return attachments
def parse_priority(priority):
if priority is None:
priority = get_default_priority()
# If priority is given as a string, returns the enum representation
if isinstance(priority, string_types):
priority = getattr(PRIORITY, priority, None)
if priority is None:
raise ValueError('Invalid priority, must be one of: %s' %
', '.join(PRIORITY._fields))
return priority
def parse_emails(emails):
"""
A function that returns a list of valid email addresses.
This function will also convert a single email address into
a list of email addresses.
None value is also converted into an empty list.
"""
if isinstance(emails, string_types):
emails = [emails]
elif emails is None:
emails = []
for email in emails:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('%s is not a valid email address' % email)
return emails
|
ui/django-post_office
|
post_office/utils.py
|
create_attachments
|
python
|
def create_attachments(attachment_files):
attachments = []
for filename, filedata in attachment_files.items():
if isinstance(filedata, dict):
content = filedata.get('file', None)
mimetype = filedata.get('mimetype', None)
headers = filedata.get('headers', None)
else:
content = filedata
mimetype = None
headers = None
opened_file = None
if isinstance(content, string_types):
# `content` is a filename - try to open the file
opened_file = open(content, 'rb')
content = File(opened_file)
attachment = Attachment()
if mimetype:
attachment.mimetype = mimetype
attachment.headers = headers
attachment.file.save(filename, content=content, save=True)
attachments.append(attachment)
if opened_file is not None:
opened_file.close()
return attachments
|
Create Attachment instances from files
attachment_files is a dict of:
* Key - the filename to be used for the attachment.
* Value - file-like object, or a filename to open OR a dict of {'file': file-like-object, 'mimetype': string}
Returns a list of Attachment objects
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/utils.py#L66-L106
| null |
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files import File
from django.utils.encoding import force_text
from post_office import cache
from .compat import string_types
from .models import Email, PRIORITY, STATUS, EmailTemplate, Attachment
from .settings import get_default_priority
from .validators import validate_email_with_name
def send_mail(subject, message, from_email, recipient_list, html_message='',
scheduled_time=None, headers=None, priority=PRIORITY.medium):
"""
Add a new message to the mail queue. This is a replacement for Django's
``send_mail`` core email method.
"""
subject = force_text(subject)
status = None if priority == PRIORITY.now else STATUS.queued
emails = []
for address in recipient_list:
emails.append(
Email.objects.create(
from_email=from_email, to=address, subject=subject,
message=message, html_message=html_message, status=status,
headers=headers, priority=priority, scheduled_time=scheduled_time
)
)
if priority == PRIORITY.now:
for email in emails:
email.dispatch()
return emails
def get_email_template(name, language=''):
"""
Function that returns an email template instance, from cache or DB.
"""
use_cache = getattr(settings, 'POST_OFFICE_CACHE', True)
if use_cache:
use_cache = getattr(settings, 'POST_OFFICE_TEMPLATE_CACHE', True)
if not use_cache:
return EmailTemplate.objects.get(name=name, language=language)
else:
composite_name = '%s:%s' % (name, language)
email_template = cache.get(composite_name)
if email_template is not None:
return email_template
else:
email_template = EmailTemplate.objects.get(name=name,
language=language)
cache.set(composite_name, email_template)
return email_template
def split_emails(emails, split_count=1):
# Group emails into X sublists
# taken from http://www.garyrobinson.net/2008/04/splitting-a-pyt.html
# Strange bug, only return 100 email if we do not evaluate the list
if list(emails):
return [emails[i::split_count] for i in range(split_count)]
def parse_priority(priority):
if priority is None:
priority = get_default_priority()
# If priority is given as a string, returns the enum representation
if isinstance(priority, string_types):
priority = getattr(PRIORITY, priority, None)
if priority is None:
raise ValueError('Invalid priority, must be one of: %s' %
', '.join(PRIORITY._fields))
return priority
def parse_emails(emails):
"""
A function that returns a list of valid email addresses.
This function will also convert a single email address into
a list of email addresses.
None value is also converted into an empty list.
"""
if isinstance(emails, string_types):
emails = [emails]
elif emails is None:
emails = []
for email in emails:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('%s is not a valid email address' % email)
return emails
|
ui/django-post_office
|
post_office/utils.py
|
parse_emails
|
python
|
def parse_emails(emails):
if isinstance(emails, string_types):
emails = [emails]
elif emails is None:
emails = []
for email in emails:
try:
validate_email_with_name(email)
except ValidationError:
raise ValidationError('%s is not a valid email address' % email)
return emails
|
A function that returns a list of valid email addresses.
This function will also convert a single email address into
a list of email addresses.
None value is also converted into an empty list.
|
train
|
https://github.com/ui/django-post_office/blob/03e1ffb69829b475402f0f3ecd9f8a90af7da4bd/post_office/utils.py#L122-L141
|
[
"def validate_email_with_name(value):\n \"\"\"\n Validate email address.\n\n Both \"Recipient Name <email@example.com>\" and \"email@example.com\" are valid.\n \"\"\"\n value = force_text(value)\n\n recipient = value\n if '<' and '>' in value:\n start = value.find('<') + 1\n end = value.find('>')\n if start < end:\n recipient = value[start:end]\n\n validate_email(recipient)\n"
] |
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.files import File
from django.utils.encoding import force_text
from post_office import cache
from .compat import string_types
from .models import Email, PRIORITY, STATUS, EmailTemplate, Attachment
from .settings import get_default_priority
from .validators import validate_email_with_name
def send_mail(subject, message, from_email, recipient_list, html_message='',
scheduled_time=None, headers=None, priority=PRIORITY.medium):
"""
Add a new message to the mail queue. This is a replacement for Django's
``send_mail`` core email method.
"""
subject = force_text(subject)
status = None if priority == PRIORITY.now else STATUS.queued
emails = []
for address in recipient_list:
emails.append(
Email.objects.create(
from_email=from_email, to=address, subject=subject,
message=message, html_message=html_message, status=status,
headers=headers, priority=priority, scheduled_time=scheduled_time
)
)
if priority == PRIORITY.now:
for email in emails:
email.dispatch()
return emails
def get_email_template(name, language=''):
"""
Function that returns an email template instance, from cache or DB.
"""
use_cache = getattr(settings, 'POST_OFFICE_CACHE', True)
if use_cache:
use_cache = getattr(settings, 'POST_OFFICE_TEMPLATE_CACHE', True)
if not use_cache:
return EmailTemplate.objects.get(name=name, language=language)
else:
composite_name = '%s:%s' % (name, language)
email_template = cache.get(composite_name)
if email_template is not None:
return email_template
else:
email_template = EmailTemplate.objects.get(name=name,
language=language)
cache.set(composite_name, email_template)
return email_template
def split_emails(emails, split_count=1):
# Group emails into X sublists
# taken from http://www.garyrobinson.net/2008/04/splitting-a-pyt.html
# Strange bug, only return 100 email if we do not evaluate the list
if list(emails):
return [emails[i::split_count] for i in range(split_count)]
def create_attachments(attachment_files):
"""
Create Attachment instances from files
attachment_files is a dict of:
* Key - the filename to be used for the attachment.
* Value - file-like object, or a filename to open OR a dict of {'file': file-like-object, 'mimetype': string}
Returns a list of Attachment objects
"""
attachments = []
for filename, filedata in attachment_files.items():
if isinstance(filedata, dict):
content = filedata.get('file', None)
mimetype = filedata.get('mimetype', None)
headers = filedata.get('headers', None)
else:
content = filedata
mimetype = None
headers = None
opened_file = None
if isinstance(content, string_types):
# `content` is a filename - try to open the file
opened_file = open(content, 'rb')
content = File(opened_file)
attachment = Attachment()
if mimetype:
attachment.mimetype = mimetype
attachment.headers = headers
attachment.file.save(filename, content=content, save=True)
attachments.append(attachment)
if opened_file is not None:
opened_file.close()
return attachments
def parse_priority(priority):
if priority is None:
priority = get_default_priority()
# If priority is given as a string, returns the enum representation
if isinstance(priority, string_types):
priority = getattr(PRIORITY, priority, None)
if priority is None:
raise ValueError('Invalid priority, must be one of: %s' %
', '.join(PRIORITY._fields))
return priority
|
stefanfoulis/django-sendsms
|
sendsms/backends/smspubli.py
|
SmsBackend._send
|
python
|
def _send(self, message):
params = {
'V': SMSPUBLI_API_VERSION,
'UN': SMSPUBLI_USERNAME,
'PWD': SMSPUBLI_PASSWORD,
'R': SMSPUBLI_ROUTE,
'SA': message.from_phone,
'DA': ','.join(message.to),
'M': message.body.encode('latin-1'),
'DC': SMSPUBLI_DC,
'DR': SMSPUBLI_DR,
'UR': message.from_phone
}
if SMSPUBLI_ALLOW_LONG_SMS:
params['LM'] = '1'
response = requests.post(SMSPUBLI_API_URL, params)
if response.status_code != 200:
if not self.fail_silently:
raise
else:
return False
response_msg, response_code = response.content.split(':')
if response_msg == 'OK':
try:
if "," in response_code:
codes = map(int, response_code.split(","))
else:
codes = [int(response_code)]
for code in codes:
if code == -5:
#: TODO send error signal (no $$)
pass
elif code == -3:
#: TODO send error signal (incorrect num)
pass
return True
except (ValueError, TypeError):
if not self.fail_silently:
raise
return False
return False
|
Private method for send one message.
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sended else False
:rtype: bool
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/smspubli.py#L59-L113
| null |
class SmsBackend(BaseSmsBackend):
"""
SMS Backend smspubli.com provider.
The methods "get_xxxxxx" serve to facilitate the inheritance. Thus if a private
project in the access data are dynamic, and are stored in the database. A child
class overrides the method "get_xxxx" to return data stored in the database.
"""
def get_username(self):
return SMSPUBLI_USERNAME
def get_password(self):
return SMSPUBLI_PASSWORD
def _send(self, message):
"""
Private method for send one message.
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sended else False
:rtype: bool
"""
params = {
'V': SMSPUBLI_API_VERSION,
'UN': SMSPUBLI_USERNAME,
'PWD': SMSPUBLI_PASSWORD,
'R': SMSPUBLI_ROUTE,
'SA': message.from_phone,
'DA': ','.join(message.to),
'M': message.body.encode('latin-1'),
'DC': SMSPUBLI_DC,
'DR': SMSPUBLI_DR,
'UR': message.from_phone
}
if SMSPUBLI_ALLOW_LONG_SMS:
params['LM'] = '1'
response = requests.post(SMSPUBLI_API_URL, params)
if response.status_code != 200:
if not self.fail_silently:
raise
else:
return False
response_msg, response_code = response.content.split(':')
if response_msg == 'OK':
try:
if "," in response_code:
codes = map(int, response_code.split(","))
else:
codes = [int(response_code)]
for code in codes:
if code == -5:
#: TODO send error signal (no $$)
pass
elif code == -3:
#: TODO send error signal (incorrect num)
pass
return True
except (ValueError, TypeError):
if not self.fail_silently:
raise
return False
return False
def send_messages(self, messages):
"""
Send messages.
:param list messages: List of SmsMessage instences.
:returns: number of messages seded succesful.
:rtype: int
"""
counter = 0
for message in messages:
res = self._send(message)
if res:
counter += 1
return counter
|
stefanfoulis/django-sendsms
|
sendsms/backends/filebased.py
|
SmsBackend._get_filename
|
python
|
def _get_filename(self):
if self._fname is None:
timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
fname = "%s-%s.log" % (timestamp, abs(id(self)))
self._fname = os.path.join(self.file_path, fname)
return self._fname
|
Return a unique file name.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/filebased.py#L43-L49
| null |
class SmsBackend(ConsoleSmsBackend):
def __init__(self, *args, **kwargs):
self._fname = None
if 'file_path' in kwargs:
self.file_path = kwargs.pop('file_path')
else:
self.file_path = getattr(settings, 'SMS_FILE_PATH', None)
# Make sure self.file_path is a string.
if not isinstance(self.file_path, basestring):
raise ImproperlyConfigured('Path for saving SMS is invalid: %r' % self.file_path)
self.file_path = os.path.abspath(self.file_path)
# Make sure that self.file_path is an directory if it exists.
if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
raise ImproperlyConfigured('Path for saving SMS messages exists, but is not a directory: %s' % self.file_path)
# Try to create it, if it not exists.
elif not os.path.exists(self.file_path):
try:
os.makedirs(self.file_path)
except (OSError, err):
raise ImproperlyConfigured('Could not create directory for saving SMS messages: %s (%s)' % (self.file_path, err))
# Make sure that self.file_path is writable.
if not os.access(self.file_path, os.W_OK):
raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
# Finally, call super().
# Since we're using the console-based backend as a base,
# force the stream to be None, so we don't default to stdout
kwargs['stream'] = None
super(SmsBackend, self).__init__(*args, **kwargs)
def open(self):
if self.stream is None:
self.stream = open(self._get_filename(), 'a')
return True
return False
def close(self):
try:
if self.stream is not None:
self.stream.close()
finally:
self.stream = None
|
stefanfoulis/django-sendsms
|
sendsms/backends/smsglobal.py
|
SmsBackend.get_balance
|
python
|
def get_balance(self):
if not SMSGLOBAL_CHECK_BALANCE_COUNTRY:
raise Exception('SMSGLOBAL_CHECK_BALANCE_COUNTRY setting must be set to check balance.')
params = {
'user' : self.get_username(),
'password' : self.get_password(),
'country' : SMSGLOBAL_CHECK_BALANCE_COUNTRY,
}
req = urllib2.Request(SMSGLOBAL_API_URL_CHECKBALANCE, urllib.urlencode(params))
response = urllib2.urlopen(req).read()
# CREDITS:8658.44;COUNTRY:AU;SMS:3764.54;
if response.startswith('ERROR'):
raise Exception('Error retrieving balance: %s' % response.replace('ERROR:', ''))
return dict([(p.split(':')[0].lower(), p.split(':')[1]) for p in response.split(';') if len(p) > 0])
|
Get balance with provider.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/smsglobal.py#L32-L52
|
[
"def get_username(self):\n return SMSGLOBAL_USERNAME\n",
"def get_password(self):\n return SMSGLOBAL_PASSWORD\n"
] |
class SmsBackend(BaseSmsBackend):
"""
A wrapper that manages the SMS Global network connection.
Sending and parsing functionality borrowed from http://namingcrisis.net/code
"""
def get_username(self):
return SMSGLOBAL_USERNAME
def get_password(self):
return SMSGLOBAL_PASSWORD
def get_balance(self):
"""
Get balance with provider.
"""
if not SMSGLOBAL_CHECK_BALANCE_COUNTRY:
raise Exception('SMSGLOBAL_CHECK_BALANCE_COUNTRY setting must be set to check balance.')
params = {
'user' : self.get_username(),
'password' : self.get_password(),
'country' : SMSGLOBAL_CHECK_BALANCE_COUNTRY,
}
req = urllib2.Request(SMSGLOBAL_API_URL_CHECKBALANCE, urllib.urlencode(params))
response = urllib2.urlopen(req).read()
# CREDITS:8658.44;COUNTRY:AU;SMS:3764.54;
if response.startswith('ERROR'):
raise Exception('Error retrieving balance: %s' % response.replace('ERROR:', ''))
return dict([(p.split(':')[0].lower(), p.split(':')[1]) for p in response.split(';') if len(p) > 0])
def send_messages(self, sms_messages):
"""
Sends one or more SmsMessage objects and returns the number of sms
messages sent.
"""
if not sms_messages:
return
num_sent = 0
for message in sms_messages:
if self._send(message):
num_sent += 1
return num_sent
def _send(self, message):
"""A helper method that does the actual sending."""
charset='UTF-8'
params = {
'action' : 'sendsms',
'user' : self.get_username(),
'password' : self.get_password(),
'from' : message.from_phone,
'to' : ",".join(message.to),
'text' : message.body,
'clientcharset' : charset,
'detectcharset' : 1,
'maxsplit': int(math.ceil(len(message.body) / 160))
}
req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))
result_page = urllib2.urlopen(req).read()
results = self._parse_response(result_page)
if results is None:
if not self.fail_silently:
raise Exception("Error determining response: [" + result_page + "]")
return False
code, sendqmsgid, msgid = results
if code != '0':
if not self.fail_silently:
raise Exception("Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]" % (result_page, results))
return False
else:
logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (
message.to,
message.from_phone,
code,
sendqmsgid,
msgid,
message.body
))
return True
def _parse_response(self, result_page):
"""
Takes a result page of sending the sms, returns an extracted tuple:
('numeric_err_code', '<sent_queued_message_id>', '<smsglobalmsgid>')
Returns None if unable to extract info from result_page, it should be
safe to assume that it was either a failed result or worse, the interface
contract has changed.
"""
# Sample result_page, single line -> "OK: 0; Sent queued message ID: 2063619577732703 SMSGlobalMsgID:6171799108850954"
resultline = result_page.splitlines()[0] # get result line
if resultline.startswith('ERROR:'):
raise Exception(resultline.replace('ERROR: ', ''))
patt = re.compile(r'^.+?:\s*(.+?)\s*;\s*Sent queued message ID:\s*(.+?)\s*SMSGlobalMsgID:(.+?)$', re.IGNORECASE)
m = patt.match(resultline)
if m:
return (m.group(1), m.group(2), m.group(3))
return None
|
stefanfoulis/django-sendsms
|
sendsms/backends/smsglobal.py
|
SmsBackend.send_messages
|
python
|
def send_messages(self, sms_messages):
if not sms_messages:
return
num_sent = 0
for message in sms_messages:
if self._send(message):
num_sent += 1
return num_sent
|
Sends one or more SmsMessage objects and returns the number of sms
messages sent.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/smsglobal.py#L54-L66
|
[
"def _send(self, message):\n \"\"\"A helper method that does the actual sending.\"\"\"\n charset='UTF-8'\n params = {\n 'action' : 'sendsms',\n 'user' : self.get_username(),\n 'password' : self.get_password(),\n 'from' : message.from_phone,\n 'to' : \",\".join(message.to),\n 'text' : message.body,\n 'clientcharset' : charset,\n 'detectcharset' : 1,\n 'maxsplit': int(math.ceil(len(message.body) / 160))\n }\n\n req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))\n result_page = urllib2.urlopen(req).read()\n results = self._parse_response(result_page)\n\n if results is None:\n if not self.fail_silently:\n raise Exception(\"Error determining response: [\" + result_page + \"]\")\n return False\n\n code, sendqmsgid, msgid = results\n\n if code != '0':\n if not self.fail_silently:\n raise Exception(\"Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]\" % (result_page, results))\n return False\n else:\n logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (\n message.to,\n message.from_phone,\n code, \n sendqmsgid, \n msgid,\n message.body\n ))\n return True\n"
] |
class SmsBackend(BaseSmsBackend):
"""
A wrapper that manages the SMS Global network connection.
Sending and parsing functionality borrowed from http://namingcrisis.net/code
"""
def get_username(self):
return SMSGLOBAL_USERNAME
def get_password(self):
return SMSGLOBAL_PASSWORD
def get_balance(self):
"""
Get balance with provider.
"""
if not SMSGLOBAL_CHECK_BALANCE_COUNTRY:
raise Exception('SMSGLOBAL_CHECK_BALANCE_COUNTRY setting must be set to check balance.')
params = {
'user' : self.get_username(),
'password' : self.get_password(),
'country' : SMSGLOBAL_CHECK_BALANCE_COUNTRY,
}
req = urllib2.Request(SMSGLOBAL_API_URL_CHECKBALANCE, urllib.urlencode(params))
response = urllib2.urlopen(req).read()
# CREDITS:8658.44;COUNTRY:AU;SMS:3764.54;
if response.startswith('ERROR'):
raise Exception('Error retrieving balance: %s' % response.replace('ERROR:', ''))
return dict([(p.split(':')[0].lower(), p.split(':')[1]) for p in response.split(';') if len(p) > 0])
def send_messages(self, sms_messages):
"""
Sends one or more SmsMessage objects and returns the number of sms
messages sent.
"""
if not sms_messages:
return
num_sent = 0
for message in sms_messages:
if self._send(message):
num_sent += 1
return num_sent
def _send(self, message):
"""A helper method that does the actual sending."""
charset='UTF-8'
params = {
'action' : 'sendsms',
'user' : self.get_username(),
'password' : self.get_password(),
'from' : message.from_phone,
'to' : ",".join(message.to),
'text' : message.body,
'clientcharset' : charset,
'detectcharset' : 1,
'maxsplit': int(math.ceil(len(message.body) / 160))
}
req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))
result_page = urllib2.urlopen(req).read()
results = self._parse_response(result_page)
if results is None:
if not self.fail_silently:
raise Exception("Error determining response: [" + result_page + "]")
return False
code, sendqmsgid, msgid = results
if code != '0':
if not self.fail_silently:
raise Exception("Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]" % (result_page, results))
return False
else:
logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (
message.to,
message.from_phone,
code,
sendqmsgid,
msgid,
message.body
))
return True
def _parse_response(self, result_page):
"""
Takes a result page of sending the sms, returns an extracted tuple:
('numeric_err_code', '<sent_queued_message_id>', '<smsglobalmsgid>')
Returns None if unable to extract info from result_page, it should be
safe to assume that it was either a failed result or worse, the interface
contract has changed.
"""
# Sample result_page, single line -> "OK: 0; Sent queued message ID: 2063619577732703 SMSGlobalMsgID:6171799108850954"
resultline = result_page.splitlines()[0] # get result line
if resultline.startswith('ERROR:'):
raise Exception(resultline.replace('ERROR: ', ''))
patt = re.compile(r'^.+?:\s*(.+?)\s*;\s*Sent queued message ID:\s*(.+?)\s*SMSGlobalMsgID:(.+?)$', re.IGNORECASE)
m = patt.match(resultline)
if m:
return (m.group(1), m.group(2), m.group(3))
return None
|
stefanfoulis/django-sendsms
|
sendsms/backends/smsglobal.py
|
SmsBackend._send
|
python
|
def _send(self, message):
charset='UTF-8'
params = {
'action' : 'sendsms',
'user' : self.get_username(),
'password' : self.get_password(),
'from' : message.from_phone,
'to' : ",".join(message.to),
'text' : message.body,
'clientcharset' : charset,
'detectcharset' : 1,
'maxsplit': int(math.ceil(len(message.body) / 160))
}
req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))
result_page = urllib2.urlopen(req).read()
results = self._parse_response(result_page)
if results is None:
if not self.fail_silently:
raise Exception("Error determining response: [" + result_page + "]")
return False
code, sendqmsgid, msgid = results
if code != '0':
if not self.fail_silently:
raise Exception("Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]" % (result_page, results))
return False
else:
logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (
message.to,
message.from_phone,
code,
sendqmsgid,
msgid,
message.body
))
return True
|
A helper method that does the actual sending.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/smsglobal.py#L68-L107
|
[
"def get_username(self):\n return SMSGLOBAL_USERNAME\n",
"def get_password(self):\n return SMSGLOBAL_PASSWORD\n",
"def _parse_response(self, result_page):\n \"\"\"\n Takes a result page of sending the sms, returns an extracted tuple:\n ('numeric_err_code', '<sent_queued_message_id>', '<smsglobalmsgid>')\n Returns None if unable to extract info from result_page, it should be\n safe to assume that it was either a failed result or worse, the interface\n contract has changed.\n \"\"\"\n # Sample result_page, single line -> \"OK: 0; Sent queued message ID: 2063619577732703 SMSGlobalMsgID:6171799108850954\"\n resultline = result_page.splitlines()[0] # get result line\n if resultline.startswith('ERROR:'):\n raise Exception(resultline.replace('ERROR: ', ''))\n patt = re.compile(r'^.+?:\\s*(.+?)\\s*;\\s*Sent queued message ID:\\s*(.+?)\\s*SMSGlobalMsgID:(.+?)$', re.IGNORECASE)\n m = patt.match(resultline)\n if m:\n return (m.group(1), m.group(2), m.group(3)) \n return None "
] |
class SmsBackend(BaseSmsBackend):
"""
A wrapper that manages the SMS Global network connection.
Sending and parsing functionality borrowed from http://namingcrisis.net/code
"""
def get_username(self):
return SMSGLOBAL_USERNAME
def get_password(self):
return SMSGLOBAL_PASSWORD
def get_balance(self):
"""
Get balance with provider.
"""
if not SMSGLOBAL_CHECK_BALANCE_COUNTRY:
raise Exception('SMSGLOBAL_CHECK_BALANCE_COUNTRY setting must be set to check balance.')
params = {
'user' : self.get_username(),
'password' : self.get_password(),
'country' : SMSGLOBAL_CHECK_BALANCE_COUNTRY,
}
req = urllib2.Request(SMSGLOBAL_API_URL_CHECKBALANCE, urllib.urlencode(params))
response = urllib2.urlopen(req).read()
# CREDITS:8658.44;COUNTRY:AU;SMS:3764.54;
if response.startswith('ERROR'):
raise Exception('Error retrieving balance: %s' % response.replace('ERROR:', ''))
return dict([(p.split(':')[0].lower(), p.split(':')[1]) for p in response.split(';') if len(p) > 0])
def send_messages(self, sms_messages):
"""
Sends one or more SmsMessage objects and returns the number of sms
messages sent.
"""
if not sms_messages:
return
num_sent = 0
for message in sms_messages:
if self._send(message):
num_sent += 1
return num_sent
def _send(self, message):
"""A helper method that does the actual sending."""
charset='UTF-8'
params = {
'action' : 'sendsms',
'user' : self.get_username(),
'password' : self.get_password(),
'from' : message.from_phone,
'to' : ",".join(message.to),
'text' : message.body,
'clientcharset' : charset,
'detectcharset' : 1,
'maxsplit': int(math.ceil(len(message.body) / 160))
}
req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))
result_page = urllib2.urlopen(req).read()
results = self._parse_response(result_page)
if results is None:
if not self.fail_silently:
raise Exception("Error determining response: [" + result_page + "]")
return False
code, sendqmsgid, msgid = results
if code != '0':
if not self.fail_silently:
raise Exception("Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]" % (result_page, results))
return False
else:
logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (
message.to,
message.from_phone,
code,
sendqmsgid,
msgid,
message.body
))
return True
def _parse_response(self, result_page):
"""
Takes a result page of sending the sms, returns an extracted tuple:
('numeric_err_code', '<sent_queued_message_id>', '<smsglobalmsgid>')
Returns None if unable to extract info from result_page, it should be
safe to assume that it was either a failed result or worse, the interface
contract has changed.
"""
# Sample result_page, single line -> "OK: 0; Sent queued message ID: 2063619577732703 SMSGlobalMsgID:6171799108850954"
resultline = result_page.splitlines()[0] # get result line
if resultline.startswith('ERROR:'):
raise Exception(resultline.replace('ERROR: ', ''))
patt = re.compile(r'^.+?:\s*(.+?)\s*;\s*Sent queued message ID:\s*(.+?)\s*SMSGlobalMsgID:(.+?)$', re.IGNORECASE)
m = patt.match(resultline)
if m:
return (m.group(1), m.group(2), m.group(3))
return None
|
stefanfoulis/django-sendsms
|
sendsms/backends/smsglobal.py
|
SmsBackend._parse_response
|
python
|
def _parse_response(self, result_page):
# Sample result_page, single line -> "OK: 0; Sent queued message ID: 2063619577732703 SMSGlobalMsgID:6171799108850954"
resultline = result_page.splitlines()[0] # get result line
if resultline.startswith('ERROR:'):
raise Exception(resultline.replace('ERROR: ', ''))
patt = re.compile(r'^.+?:\s*(.+?)\s*;\s*Sent queued message ID:\s*(.+?)\s*SMSGlobalMsgID:(.+?)$', re.IGNORECASE)
m = patt.match(resultline)
if m:
return (m.group(1), m.group(2), m.group(3))
return None
|
Takes a result page of sending the sms, returns an extracted tuple:
('numeric_err_code', '<sent_queued_message_id>', '<smsglobalmsgid>')
Returns None if unable to extract info from result_page, it should be
safe to assume that it was either a failed result or worse, the interface
contract has changed.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/smsglobal.py#L109-L125
| null |
class SmsBackend(BaseSmsBackend):
"""
A wrapper that manages the SMS Global network connection.
Sending and parsing functionality borrowed from http://namingcrisis.net/code
"""
def get_username(self):
return SMSGLOBAL_USERNAME
def get_password(self):
return SMSGLOBAL_PASSWORD
def get_balance(self):
"""
Get balance with provider.
"""
if not SMSGLOBAL_CHECK_BALANCE_COUNTRY:
raise Exception('SMSGLOBAL_CHECK_BALANCE_COUNTRY setting must be set to check balance.')
params = {
'user' : self.get_username(),
'password' : self.get_password(),
'country' : SMSGLOBAL_CHECK_BALANCE_COUNTRY,
}
req = urllib2.Request(SMSGLOBAL_API_URL_CHECKBALANCE, urllib.urlencode(params))
response = urllib2.urlopen(req).read()
# CREDITS:8658.44;COUNTRY:AU;SMS:3764.54;
if response.startswith('ERROR'):
raise Exception('Error retrieving balance: %s' % response.replace('ERROR:', ''))
return dict([(p.split(':')[0].lower(), p.split(':')[1]) for p in response.split(';') if len(p) > 0])
def send_messages(self, sms_messages):
"""
Sends one or more SmsMessage objects and returns the number of sms
messages sent.
"""
if not sms_messages:
return
num_sent = 0
for message in sms_messages:
if self._send(message):
num_sent += 1
return num_sent
def _send(self, message):
"""A helper method that does the actual sending."""
charset='UTF-8'
params = {
'action' : 'sendsms',
'user' : self.get_username(),
'password' : self.get_password(),
'from' : message.from_phone,
'to' : ",".join(message.to),
'text' : message.body,
'clientcharset' : charset,
'detectcharset' : 1,
'maxsplit': int(math.ceil(len(message.body) / 160))
}
req = urllib2.Request(SMSGLOBAL_API_URL_SENDSMS, urllib.urlencode(params))
result_page = urllib2.urlopen(req).read()
results = self._parse_response(result_page)
if results is None:
if not self.fail_silently:
raise Exception("Error determining response: [" + result_page + "]")
return False
code, sendqmsgid, msgid = results
if code != '0':
if not self.fail_silently:
raise Exception("Error sending sms: [%s], extracted results(code, sendqmsgid, msgid): [%s]" % (result_page, results))
return False
else:
logger.info('SENT to: %s; sender: %s; code: %s; sendqmsgid: %s; msgid: %s; message: %s' % (
message.to,
message.from_phone,
code,
sendqmsgid,
msgid,
message.body
))
return True
|
stefanfoulis/django-sendsms
|
sendsms/api.py
|
send_sms
|
python
|
def send_sms(body, from_phone, to, flash=False, fail_silently=False,
auth_user=None, auth_password=None, connection=None):
from sendsms.message import SmsMessage
connection = connection or get_connection(
username = auth_user,
password = auth_password,
fail_silently = fail_silently
)
return SmsMessage(body=body, from_phone=from_phone, to=to, \
flash=flash, connection=connection).send()
|
Easy wrapper for send a single SMS to a recipient list.
:returns: the number of SMSs sent.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/api.py#L14-L28
|
[
"def get_connection(path=None, fail_silently=False, **kwargs):\n \"\"\"\n Load an sms backend and return an instance of it.\n\n :param string path: backend python path. Default: sendsms.backends.console.SmsBackend\n :param bool fail_silently: Flag to not throw exceptions on error. Default: False\n :returns: backend class instance.\n :rtype: :py:class:`~sendsms.backends.base.BaseSmsBackend` subclass\n \"\"\"\n\n path = path or getattr(settings, 'SENDSMS_BACKEND', 'sendsms.backends.locmem.SmsBackend')\n try:\n mod_name, klass_name = path.rsplit('.', 1)\n mod = import_module(mod_name)\n except AttributeError as e:\n raise ImproperlyConfigured(u'Error importing sms backend module %s: \"%s\"' % (mod_name, e))\n\n try:\n klass = getattr(mod, klass_name)\n except AttributeError:\n raise ImproperlyConfigured('Module \"%s\" does not define a \"%s\" class' % (mod_name, klass_name))\n\n return klass(fail_silently=fail_silently, **kwargs)\n",
"def send(self, fail_silently=False):\n \"\"\"\n Sends the sms message\n \"\"\"\n if not self.to:\n # Don't bother creating the connection if there's nobody to send to\n return 0\n res = self.get_connection(fail_silently).send_messages([self])\n sms_post_send.send(sender=self, to=self.to, from_phone=self.from_phone, body=self.body)\n return res\n"
] |
#-*- coding: utf-8 -*-
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
try:
# Django versions >= 1.9
from django.utils.module_loading import import_module
except ImportError:
# Django versions < 1.9
from django.utils.importlib import import_module
from sendsms.utils import load_object
def send_mass_sms(datatuple, fail_silently=False,
auth_user=None, auth_password=None, connection=None):
"""
Given a datatuple of (message, from_phone, to, flash), sends each message to each
recipient list.
:returns: the number of SMSs sent.
"""
from sendsms.message import SmsMessage
connection = connection or get_connection(
username = auth_user,
password = auth_password,
fail_silently = fail_silently
)
messages = [SmsMessage(message=message, from_phone=from_phone, to=to, flash=flash)
for message, from_phone, to, flash in datatuple]
connection.send_messages(messages)
def get_connection(path=None, fail_silently=False, **kwargs):
"""
Load an sms backend and return an instance of it.
:param string path: backend python path. Default: sendsms.backends.console.SmsBackend
:param bool fail_silently: Flag to not throw exceptions on error. Default: False
:returns: backend class instance.
:rtype: :py:class:`~sendsms.backends.base.BaseSmsBackend` subclass
"""
path = path or getattr(settings, 'SENDSMS_BACKEND', 'sendsms.backends.locmem.SmsBackend')
try:
mod_name, klass_name = path.rsplit('.', 1)
mod = import_module(mod_name)
except AttributeError as e:
raise ImproperlyConfigured(u'Error importing sms backend module %s: "%s"' % (mod_name, e))
try:
klass = getattr(mod, klass_name)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a "%s" class' % (mod_name, klass_name))
return klass(fail_silently=fail_silently, **kwargs)
|
stefanfoulis/django-sendsms
|
sendsms/api.py
|
send_mass_sms
|
python
|
def send_mass_sms(datatuple, fail_silently=False,
auth_user=None, auth_password=None, connection=None):
from sendsms.message import SmsMessage
connection = connection or get_connection(
username = auth_user,
password = auth_password,
fail_silently = fail_silently
)
messages = [SmsMessage(message=message, from_phone=from_phone, to=to, flash=flash)
for message, from_phone, to, flash in datatuple]
connection.send_messages(messages)
|
Given a datatuple of (message, from_phone, to, flash), sends each message to each
recipient list.
:returns: the number of SMSs sent.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/api.py#L31-L48
|
[
"def get_connection(path=None, fail_silently=False, **kwargs):\n \"\"\"\n Load an sms backend and return an instance of it.\n\n :param string path: backend python path. Default: sendsms.backends.console.SmsBackend\n :param bool fail_silently: Flag to not throw exceptions on error. Default: False\n :returns: backend class instance.\n :rtype: :py:class:`~sendsms.backends.base.BaseSmsBackend` subclass\n \"\"\"\n\n path = path or getattr(settings, 'SENDSMS_BACKEND', 'sendsms.backends.locmem.SmsBackend')\n try:\n mod_name, klass_name = path.rsplit('.', 1)\n mod = import_module(mod_name)\n except AttributeError as e:\n raise ImproperlyConfigured(u'Error importing sms backend module %s: \"%s\"' % (mod_name, e))\n\n try:\n klass = getattr(mod, klass_name)\n except AttributeError:\n raise ImproperlyConfigured('Module \"%s\" does not define a \"%s\" class' % (mod_name, klass_name))\n\n return klass(fail_silently=fail_silently, **kwargs)\n"
] |
#-*- coding: utf-8 -*-
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
try:
# Django versions >= 1.9
from django.utils.module_loading import import_module
except ImportError:
# Django versions < 1.9
from django.utils.importlib import import_module
from sendsms.utils import load_object
def send_sms(body, from_phone, to, flash=False, fail_silently=False,
auth_user=None, auth_password=None, connection=None):
"""
Easy wrapper for send a single SMS to a recipient list.
:returns: the number of SMSs sent.
"""
from sendsms.message import SmsMessage
connection = connection or get_connection(
username = auth_user,
password = auth_password,
fail_silently = fail_silently
)
return SmsMessage(body=body, from_phone=from_phone, to=to, \
flash=flash, connection=connection).send()
def get_connection(path=None, fail_silently=False, **kwargs):
"""
Load an sms backend and return an instance of it.
:param string path: backend python path. Default: sendsms.backends.console.SmsBackend
:param bool fail_silently: Flag to not throw exceptions on error. Default: False
:returns: backend class instance.
:rtype: :py:class:`~sendsms.backends.base.BaseSmsBackend` subclass
"""
path = path or getattr(settings, 'SENDSMS_BACKEND', 'sendsms.backends.locmem.SmsBackend')
try:
mod_name, klass_name = path.rsplit('.', 1)
mod = import_module(mod_name)
except AttributeError as e:
raise ImproperlyConfigured(u'Error importing sms backend module %s: "%s"' % (mod_name, e))
try:
klass = getattr(mod, klass_name)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a "%s" class' % (mod_name, klass_name))
return klass(fail_silently=fail_silently, **kwargs)
|
stefanfoulis/django-sendsms
|
sendsms/api.py
|
get_connection
|
python
|
def get_connection(path=None, fail_silently=False, **kwargs):
path = path or getattr(settings, 'SENDSMS_BACKEND', 'sendsms.backends.locmem.SmsBackend')
try:
mod_name, klass_name = path.rsplit('.', 1)
mod = import_module(mod_name)
except AttributeError as e:
raise ImproperlyConfigured(u'Error importing sms backend module %s: "%s"' % (mod_name, e))
try:
klass = getattr(mod, klass_name)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a "%s" class' % (mod_name, klass_name))
return klass(fail_silently=fail_silently, **kwargs)
|
Load an sms backend and return an instance of it.
:param string path: backend python path. Default: sendsms.backends.console.SmsBackend
:param bool fail_silently: Flag to not throw exceptions on error. Default: False
:returns: backend class instance.
:rtype: :py:class:`~sendsms.backends.base.BaseSmsBackend` subclass
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/api.py#L51-L73
| null |
#-*- coding: utf-8 -*-
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
try:
# Django versions >= 1.9
from django.utils.module_loading import import_module
except ImportError:
# Django versions < 1.9
from django.utils.importlib import import_module
from sendsms.utils import load_object
def send_sms(body, from_phone, to, flash=False, fail_silently=False,
auth_user=None, auth_password=None, connection=None):
"""
Easy wrapper for send a single SMS to a recipient list.
:returns: the number of SMSs sent.
"""
from sendsms.message import SmsMessage
connection = connection or get_connection(
username = auth_user,
password = auth_password,
fail_silently = fail_silently
)
return SmsMessage(body=body, from_phone=from_phone, to=to, \
flash=flash, connection=connection).send()
def send_mass_sms(datatuple, fail_silently=False,
auth_user=None, auth_password=None, connection=None):
"""
Given a datatuple of (message, from_phone, to, flash), sends each message to each
recipient list.
:returns: the number of SMSs sent.
"""
from sendsms.message import SmsMessage
connection = connection or get_connection(
username = auth_user,
password = auth_password,
fail_silently = fail_silently
)
messages = [SmsMessage(message=message, from_phone=from_phone, to=to, flash=flash)
for message, from_phone, to, flash in datatuple]
connection.send_messages(messages)
|
stefanfoulis/django-sendsms
|
sendsms/backends/smssluzbacz.py
|
SmsBackend.open
|
python
|
def open(self):
self.client = SmsGateApi(getattr(settings, 'SMS_SLUZBA_API_LOGIN', ''),
getattr(settings, 'SMS_SLUZBA_API_PASSWORD', ''),
getattr(settings, 'SMS_SLUZBA_API_TIMEOUT', 2),
getattr(settings, 'SMS_SLUZBA_API_USE_SSL', True))
|
Initializes sms.sluzba.cz API library.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/smssluzbacz.py#L59-L64
| null |
class SmsBackend(BaseSmsBackend):
"""SmsBackend for sms.sluzba.cz API.
settings.py configuration constants:
SMS_SLUZBA_API_LOGIN - sms.sluzba.cz login
SMS_SLUZBA_API_PASSWORD - sms.sluzba.cz password
SMS_SLUZBA_API_TIMEOUT - connection timeout to sms.sluzba.cz in seconds
SMS_SLUZBA_API_USE_SSL - whether to use ssl via http or not
SMS_SLUZBA_API_USE_POST - whether to use GET or POST http method
"""
def __init__(self, fail_silently=False, **kwargs):
super(SmsBackend, self).__init__(fail_silently=fail_silently, **kwargs)
self.open()
def __del__(self):
self.close()
def close(self):
"""Cleaning up the reference for sms.sluzba.cz API library."""
self.client = None
def send_messages(self, messages):
"""Sending SMS messages via sms.sluzba.cz API.
Note:
This method returns number of actually sent sms messages
not number of SmsMessage instances processed.
:param messages: list of sms messages
:type messages: list of sendsms.message.SmsMessage instances
:returns: number of sent sms messages
:rtype: int
"""
count = 0
for message in messages:
message_body = unicodedata.normalize('NFKD', unicode(message.body)).encode('ascii', 'ignore')
for tel_number in message.to:
try:
self.client.send(tel_number, message_body, getattr(settings, 'SMS_SLUZBA_API_USE_POST', True))
except Exception:
if self.fail_silently:
log.exception('Error while sending sms via sms.sluzba.cz backend API.')
else:
raise
else:
count += 1
return count
|
stefanfoulis/django-sendsms
|
sendsms/backends/smssluzbacz.py
|
SmsBackend.send_messages
|
python
|
def send_messages(self, messages):
count = 0
for message in messages:
message_body = unicodedata.normalize('NFKD', unicode(message.body)).encode('ascii', 'ignore')
for tel_number in message.to:
try:
self.client.send(tel_number, message_body, getattr(settings, 'SMS_SLUZBA_API_USE_POST', True))
except Exception:
if self.fail_silently:
log.exception('Error while sending sms via sms.sluzba.cz backend API.')
else:
raise
else:
count += 1
return count
|
Sending SMS messages via sms.sluzba.cz API.
Note:
This method returns number of actually sent sms messages
not number of SmsMessage instances processed.
:param messages: list of sms messages
:type messages: list of sendsms.message.SmsMessage instances
:returns: number of sent sms messages
:rtype: int
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/smssluzbacz.py#L70-L97
| null |
class SmsBackend(BaseSmsBackend):
"""SmsBackend for sms.sluzba.cz API.
settings.py configuration constants:
SMS_SLUZBA_API_LOGIN - sms.sluzba.cz login
SMS_SLUZBA_API_PASSWORD - sms.sluzba.cz password
SMS_SLUZBA_API_TIMEOUT - connection timeout to sms.sluzba.cz in seconds
SMS_SLUZBA_API_USE_SSL - whether to use ssl via http or not
SMS_SLUZBA_API_USE_POST - whether to use GET or POST http method
"""
def __init__(self, fail_silently=False, **kwargs):
super(SmsBackend, self).__init__(fail_silently=fail_silently, **kwargs)
self.open()
def __del__(self):
self.close()
def open(self):
"""Initializes sms.sluzba.cz API library."""
self.client = SmsGateApi(getattr(settings, 'SMS_SLUZBA_API_LOGIN', ''),
getattr(settings, 'SMS_SLUZBA_API_PASSWORD', ''),
getattr(settings, 'SMS_SLUZBA_API_TIMEOUT', 2),
getattr(settings, 'SMS_SLUZBA_API_USE_SSL', True))
def close(self):
"""Cleaning up the reference for sms.sluzba.cz API library."""
self.client = None
|
stefanfoulis/django-sendsms
|
sendsms/backends/console.py
|
SmsBackend.send_messages
|
python
|
def send_messages(self, messages):
if not messages:
return
self._lock.acquire()
try:
# The try-except is nested to allow for
# Python 2.4 support (Refs #12147)
try:
stream_created = self.open()
for message in messages:
self.stream.write(render_message(message))
self.stream.write('\n')
self.stream.write('-'*79)
self.stream.write('\n')
self.stream.flush() # flush after each message
if stream_created:
self.close()
except:
if not self.fail_silently:
raise
finally:
self._lock.release()
return len(messages)
|
Write all messages to the stream in a thread-safe way.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/console.py#L18-L41
|
[
"def render_message(message):\n return u\"\"\"from: %(from)s\\nto: %(to)s\\nflash: %(flash)s\\n%(body)s\"\"\" % {\n 'from': message.from_phone,\n 'to': \", \".join(message.to),\n 'flash': message.flash,\n 'body': message.body,\n }\n",
"def open(self):\n \"\"\"\n Open a network connection.\n\n This method can be overwritten by backend implementations to open a network connection.\n It's up to the backend implementation to track the status of\n a network connection if it's needed by the backend.\n This method can be called by applications to force a single\n network connection to be used when sending multiple SMSs.\n\n The default implementation does nothing.\n \"\"\"\n pass\n",
"def close(self):\n \"\"\"Close a network connection\"\"\"\n pass\n"
] |
class SmsBackend(BaseSmsBackend):
def __init__(self, *args, **kwargs):
self.stream = kwargs.pop('stream', sys.stdout)
self._lock = threading.RLock()
super(SmsBackend, self).__init__(*args, **kwargs)
|
stefanfoulis/django-sendsms
|
sendsms/backends/esendex.py
|
SmsBackend._parse_response
|
python
|
def _parse_response(self, response):
response_dict = {}
for line in response.splitlines():
key, value = response.split("=", 1)
response_dict[key] = value
return response_dict
|
Parse http raw respone into python
dictionary object.
:param str response: http response
:returns: response dict
:rtype: dict
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/esendex.py#L58-L72
| null |
class SmsBackend(BaseSmsBackend):
"""
SMS Backend for esendex.es provider.
The methods "get_xxxxxx" serve to facilitate the inheritance. Thus if a private
project in the access data are dynamic, and are stored in the database. A child
class overrides the method "get_xxxx" to return data stored in the database.
"""
def get_username(self):
return ESENDEX_USERNAME
def get_password(self):
return ESENDEX_PASSWORD
def get_account(self):
return ESENDEX_ACCOUNT
def _parse_response(self, response):
"""
Parse http raw respone into python
dictionary object.
:param str response: http response
:returns: response dict
:rtype: dict
"""
response_dict = {}
for line in response.splitlines():
key, value = response.split("=", 1)
response_dict[key] = value
return response_dict
def _send(self, message):
"""
Private method to send one message.
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sent else False
:rtype: bool
"""
params = {
'EsendexUsername': self.get_username(),
'EsendexPassword': self.get_password(),
'EsendexAccount': self.get_account(),
'EsendexOriginator': message.from_phone,
'EsendexRecipient': ",".join(message.to),
'EsendexBody': message.body,
'EsendexPlainText':'1'
}
if ESENDEX_SANDBOX:
params['EsendexTest'] = '1'
response = requests.post(ESENDEX_API_URL, params)
if response.status_code != 200:
if not self.fail_silently:
raise Exception('Bad status code')
else:
return False
if not response.content.startswith(b'Result'):
if not self.fail_silently:
raise Exception('Bad result')
else:
return False
response = self._parse_response(response.content.decode('utf8'))
if ESENDEX_SANDBOX and response['Result'] == 'Test':
return True
else:
if response['Result'].startswith('OK'):
return True
else:
if not self.fail_silently:
raise Exception('Bad result')
return False
def send_messages(self, messages):
"""
Send messages.
:param list messages: List of SmsMessage instances.
:returns: number of messages sended successful.
:rtype: int
"""
counter = 0
for message in messages:
res = self._send(message)
if res:
counter += 1
return counter
|
stefanfoulis/django-sendsms
|
sendsms/backends/esendex.py
|
SmsBackend._send
|
python
|
def _send(self, message):
params = {
'EsendexUsername': self.get_username(),
'EsendexPassword': self.get_password(),
'EsendexAccount': self.get_account(),
'EsendexOriginator': message.from_phone,
'EsendexRecipient': ",".join(message.to),
'EsendexBody': message.body,
'EsendexPlainText':'1'
}
if ESENDEX_SANDBOX:
params['EsendexTest'] = '1'
response = requests.post(ESENDEX_API_URL, params)
if response.status_code != 200:
if not self.fail_silently:
raise Exception('Bad status code')
else:
return False
if not response.content.startswith(b'Result'):
if not self.fail_silently:
raise Exception('Bad result')
else:
return False
response = self._parse_response(response.content.decode('utf8'))
if ESENDEX_SANDBOX and response['Result'] == 'Test':
return True
else:
if response['Result'].startswith('OK'):
return True
else:
if not self.fail_silently:
raise Exception('Bad result')
return False
|
Private method to send one message.
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sent else False
:rtype: bool
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/esendex.py#L74-L119
|
[
"def get_username(self):\n return ESENDEX_USERNAME\n",
"def get_password(self):\n return ESENDEX_PASSWORD\n",
"def get_account(self):\n return ESENDEX_ACCOUNT\n",
"def _parse_response(self, response):\n \"\"\"\n Parse http raw respone into python\n dictionary object.\n\n :param str response: http response\n :returns: response dict\n :rtype: dict\n \"\"\"\n\n response_dict = {}\n for line in response.splitlines():\n key, value = response.split(\"=\", 1)\n response_dict[key] = value\n return response_dict\n"
] |
class SmsBackend(BaseSmsBackend):
"""
SMS Backend for esendex.es provider.
The methods "get_xxxxxx" serve to facilitate the inheritance. Thus if a private
project in the access data are dynamic, and are stored in the database. A child
class overrides the method "get_xxxx" to return data stored in the database.
"""
def get_username(self):
return ESENDEX_USERNAME
def get_password(self):
return ESENDEX_PASSWORD
def get_account(self):
return ESENDEX_ACCOUNT
def _parse_response(self, response):
"""
Parse http raw respone into python
dictionary object.
:param str response: http response
:returns: response dict
:rtype: dict
"""
response_dict = {}
for line in response.splitlines():
key, value = response.split("=", 1)
response_dict[key] = value
return response_dict
def _send(self, message):
"""
Private method to send one message.
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sent else False
:rtype: bool
"""
params = {
'EsendexUsername': self.get_username(),
'EsendexPassword': self.get_password(),
'EsendexAccount': self.get_account(),
'EsendexOriginator': message.from_phone,
'EsendexRecipient': ",".join(message.to),
'EsendexBody': message.body,
'EsendexPlainText':'1'
}
if ESENDEX_SANDBOX:
params['EsendexTest'] = '1'
response = requests.post(ESENDEX_API_URL, params)
if response.status_code != 200:
if not self.fail_silently:
raise Exception('Bad status code')
else:
return False
if not response.content.startswith(b'Result'):
if not self.fail_silently:
raise Exception('Bad result')
else:
return False
response = self._parse_response(response.content.decode('utf8'))
if ESENDEX_SANDBOX and response['Result'] == 'Test':
return True
else:
if response['Result'].startswith('OK'):
return True
else:
if not self.fail_silently:
raise Exception('Bad result')
return False
def send_messages(self, messages):
"""
Send messages.
:param list messages: List of SmsMessage instances.
:returns: number of messages sended successful.
:rtype: int
"""
counter = 0
for message in messages:
res = self._send(message)
if res:
counter += 1
return counter
|
stefanfoulis/django-sendsms
|
sendsms/utils.py
|
load_object
|
python
|
def load_object(import_path):
if '.' not in import_path:
raise TypeError(
"'import_path' argument to 'load_object' must "
"contain at least one dot."
)
module_name, object_name = import_path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, object_name)
|
Shamelessly stolen from https://github.com/ojii/django-load
Loads an object from an 'import_path', like in MIDDLEWARE_CLASSES and the
likes.
Import paths should be: "mypackage.mymodule.MyObject". It then imports the
module up until the last dot and tries to get the attribute after that dot
from the imported module.
If the import path does not contain any dots, a TypeError is raised.
If the module cannot be imported, an ImportError is raised.
If the attribute does not exist in the module, a AttributeError is raised.
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/utils.py#L8-L32
| null |
#-*- coding: utf-8 -*-
try:
from importlib import import_module
except ImportError:
from django.utils.importlib import import_module
def load_object(import_path):
"""
Shamelessly stolen from https://github.com/ojii/django-load
Loads an object from an 'import_path', like in MIDDLEWARE_CLASSES and the
likes.
Import paths should be: "mypackage.mymodule.MyObject". It then imports the
module up until the last dot and tries to get the attribute after that dot
from the imported module.
If the import path does not contain any dots, a TypeError is raised.
If the module cannot be imported, an ImportError is raised.
If the attribute does not exist in the module, a AttributeError is raised.
"""
if '.' not in import_path:
raise TypeError(
"'import_path' argument to 'load_object' must "
"contain at least one dot."
)
module_name, object_name = import_path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, object_name)
|
stefanfoulis/django-sendsms
|
sendsms/message.py
|
SmsMessage.send
|
python
|
def send(self, fail_silently=False):
if not self.to:
# Don't bother creating the connection if there's nobody to send to
return 0
res = self.get_connection(fail_silently).send_messages([self])
sms_post_send.send(sender=self, to=self.to, from_phone=self.from_phone, body=self.body)
return res
|
Sends the sms message
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/message.py#L30-L39
|
[
"def get_connection(self, fail_silently=False):\n if not self.connection:\n self.connection = get_connection(fail_silently=fail_silently)\n return self.connection\n"
] |
class SmsMessage(object):
"""
A sms message
"""
def __init__(self, body, from_phone=None, to=None, flash=False, connection=None):
"""
Initialize a single SMS message (which can be sent to multiple recipients)
"""
if to:
#assert not isinstance(to, basetring), '"to" argument must be a list or tuple'
self.to = list(to)
else:
self.to = []
self.from_phone = from_phone or getattr(settings, 'SENDSMS_DEFAULT_FROM_PHONE', '')
self.body = body
self.flash = flash
self.connection = connection
def get_connection(self, fail_silently=False):
if not self.connection:
self.connection = get_connection(fail_silently=fail_silently)
return self.connection
|
stefanfoulis/django-sendsms
|
sendsms/backends/nexmo.py
|
SmsBackend._send
|
python
|
def _send(self, message):
params = {
'from': message.from_phone,
'to': ",".join(message.to),
'text': message.body,
'api_key': self.get_api_key(),
'api_secret': self.get_api_secret(),
}
print(params)
logger.debug("POST to %r with body: %r", NEXMO_API_URL, params)
return self.parse(NEXMO_API_URL, requests.post(NEXMO_API_URL, data=params))
|
A helper method that does the actual sending
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sent else False
:rtype: bool
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/nexmo.py#L120-L142
|
[
"def get_api_key(self):\n return NEXMO_API_KEY\n",
"def get_api_secret(self):\n return NEXMO_API_SECRET\n",
"def parse(self, host, response):\n if not response.status_code == 200:\n if self.fail_silently:\n logger.warning(\n \"Error: %s %r\", response.status_code, response.content\n )\n return False\n raise Error(\"Error: %s %r\", response.status_code, response.content)\n\n\n status_code = int(response.json().get(\"messages\")[0].get(\"status\"))\n\n if status_code == 0:\n return True, response\n\n error_type = nexmo_error_codes.get(status_code)\n\n if self.fail_silently:\n logger.warning(\n \"Error: %s %r\", response.status_code, response.content\n )\n return False, requests\n\n raise ClientError(\"Error Code {status_code}: {text}: {meaning}\".format(\n status_code=status_code,\n text=error_type[0],\n meaning=error_type[1],\n ))\n"
] |
class SmsBackend(BaseSmsBackend):
def get_api_key(self):
return NEXMO_API_KEY
def get_api_secret(self):
return NEXMO_API_SECRET
def _parse_response(self, response):
"""
Parse http raw respone into python
dictionary object.
:param str response: http response
:returns: response dict
:rtype: dict
"""
response_dict = {}
for line in response.splitlines():
key, value = response.split("=", 1)
response_dict[key] = value
return response_dict
def parse(self, host, response):
if not response.status_code == 200:
if self.fail_silently:
logger.warning(
"Error: %s %r", response.status_code, response.content
)
return False
raise Error("Error: %s %r", response.status_code, response.content)
status_code = int(response.json().get("messages")[0].get("status"))
if status_code == 0:
return True, response
error_type = nexmo_error_codes.get(status_code)
if self.fail_silently:
logger.warning(
"Error: %s %r", response.status_code, response.content
)
return False, requests
raise ClientError("Error Code {status_code}: {text}: {meaning}".format(
status_code=status_code,
text=error_type[0],
meaning=error_type[1],
))
def send_messages(self, messages):
"""
Send messages.
:param list messages: List of SmsMessage instances.
:returns: number of messages sended successful.
:rtype: int
"""
counter = 0
for message in messages:
res, _ = self._send(message)
if res:
counter += 1
return counter
|
stefanfoulis/django-sendsms
|
sendsms/backends/nexmo.py
|
SmsBackend.send_messages
|
python
|
def send_messages(self, messages):
counter = 0
for message in messages:
res, _ = self._send(message)
if res:
counter += 1
return counter
|
Send messages.
:param list messages: List of SmsMessage instances.
:returns: number of messages sended successful.
:rtype: int
|
train
|
https://github.com/stefanfoulis/django-sendsms/blob/375f469789866853253eceba936ebcff98e83c07/sendsms/backends/nexmo.py#L144-L158
|
[
"def _send(self, message):\n \"\"\"\n A helper method that does the actual sending\n\n :param SmsMessage message: SmsMessage class instance.\n :returns: True if message is sent else False\n :rtype: bool\n \"\"\"\n\n params = {\n 'from': message.from_phone, \n 'to': \",\".join(message.to),\n 'text': message.body,\n 'api_key': self.get_api_key(),\n 'api_secret': self.get_api_secret(),\n }\n\n\n print(params)\n\n logger.debug(\"POST to %r with body: %r\", NEXMO_API_URL, params)\n\n return self.parse(NEXMO_API_URL, requests.post(NEXMO_API_URL, data=params))\n"
] |
class SmsBackend(BaseSmsBackend):
def get_api_key(self):
return NEXMO_API_KEY
def get_api_secret(self):
return NEXMO_API_SECRET
def _parse_response(self, response):
"""
Parse http raw respone into python
dictionary object.
:param str response: http response
:returns: response dict
:rtype: dict
"""
response_dict = {}
for line in response.splitlines():
key, value = response.split("=", 1)
response_dict[key] = value
return response_dict
def parse(self, host, response):
if not response.status_code == 200:
if self.fail_silently:
logger.warning(
"Error: %s %r", response.status_code, response.content
)
return False
raise Error("Error: %s %r", response.status_code, response.content)
status_code = int(response.json().get("messages")[0].get("status"))
if status_code == 0:
return True, response
error_type = nexmo_error_codes.get(status_code)
if self.fail_silently:
logger.warning(
"Error: %s %r", response.status_code, response.content
)
return False, requests
raise ClientError("Error Code {status_code}: {text}: {meaning}".format(
status_code=status_code,
text=error_type[0],
meaning=error_type[1],
))
def _send(self, message):
"""
A helper method that does the actual sending
:param SmsMessage message: SmsMessage class instance.
:returns: True if message is sent else False
:rtype: bool
"""
params = {
'from': message.from_phone,
'to': ",".join(message.to),
'text': message.body,
'api_key': self.get_api_key(),
'api_secret': self.get_api_secret(),
}
print(params)
logger.debug("POST to %r with body: %r", NEXMO_API_URL, params)
return self.parse(NEXMO_API_URL, requests.post(NEXMO_API_URL, data=params))
|
TomAugspurger/engarde
|
engarde/checks.py
|
none_missing
|
python
|
def none_missing(df, columns=None):
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
|
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L20-L44
|
[
"def bad_locations(df):\n columns = df.columns\n all_locs = chain.from_iterable(zip(df.index, cycle([col])) for col in columns)\n bad = pd.Series(list(all_locs))[np.asarray(df).ravel(1)]\n msg = bad.values\n return msg\n"
] |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
is_monotonic
|
python
|
def is_monotonic(df, items=None, increasing=None, strict=False):
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
|
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L46-L85
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
is_shape
|
python
|
def is_shape(df, shape):
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
|
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L87-L112
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
    """
    Asserts that the DataFrame is monotonic.

    Parameters
    ==========
    df : Series or DataFrame
    items : dict
        mapping columns to conditions (increasing, strict)
    increasing : None or bool
        None is either increasing or decreasing.
    strict : whether the comparison should be strict

    Returns
    =======
    df : DataFrame

    Raises
    ======
    AssertionError
        If any checked column violates its monotonicity condition.
    """
    if items is None:
        # Apply the same (increasing, strict) condition to every column.
        items = {k: (increasing, strict) for k in df}
    for col, (increasing, strict) in items.items():
        s = pd.Index(df[col])
        if increasing:
            good = s.is_monotonic_increasing
        elif increasing is None:
            # Either direction is acceptable.
            # ``Index.is_monotonic`` (an alias for ``is_monotonic_increasing``)
            # was removed in pandas 2.0, so spell the attribute out.
            good = s.is_monotonic_increasing | s.is_monotonic_decreasing
        else:
            good = s.is_monotonic_decreasing
        if strict:
            # Hoist the diff computation; it was repeated per branch before.
            diffs = s.to_series().diff().dropna()
            if increasing:
                good = good & (diffs > 0).all()
            elif increasing is None:
                good = good & ((diffs > 0).all() | (diffs < 0).all())
            else:
                good = good & (diffs < 0).all()
        if not good:
            raise AssertionError
    return df
def unique(df, columns=None):
    """
    Asserts that columns in the DataFrame only have unique values.

    Parameters
    ----------
    df : DataFrame
    columns : list
        list of columns to restrict the check to. If None, check all columns.

    Returns
    -------
    df : DataFrame
        same as the original
    """
    check_cols = df.columns if columns is None else columns
    for name in check_cols:
        if df[name].is_unique:
            continue
        raise AssertionError("Column {!r} contains non-unique values".format(name))
    return df
def unique_index(df):
    """
    Assert that the index is unique.

    Parameters
    ==========
    df : DataFrame

    Returns
    =======
    df : DataFrame

    Raises
    ======
    AssertionError
        With the duplicated index values as the exception args.
    """
    try:
        assert df.index.is_unique
    except AssertionError as e:
        # ``Index.get_duplicates()`` was removed from pandas; compute the
        # duplicated labels directly instead.
        e.args = tuple(df.index[df.index.duplicated()].unique())
        raise
    return df
def within_set(df, items=None):
    """
    Assert that df is a subset of items.

    Parameters
    ==========
    df : DataFrame
    items : dict
        mapping of columns (k) to array-like of values (v) that
        ``df[k]`` is expected to be a subset of

    Returns
    =======
    df : DataFrame
    """
    for col, allowed in items.items():
        membership = df[col].isin(allowed)
        if not membership.all():
            # Report the offending values alongside the message.
            raise AssertionError('Not in set', df.loc[~membership, col])
    return df
def within_range(df, items=None):
    """
    Assert that a DataFrame is within a range.

    Parameters
    ==========
    df : DataFrame
    items : dict
        mapping of columns (k) to a (low, high) tuple (v)
        that ``df[k]`` is expected to be between.

    Returns
    =======
    df : DataFrame
    """
    for col, bounds in items.items():
        lo, hi = bounds
        too_low = lo > df[col]
        too_high = hi < df[col]
        if too_low.any() or too_high.any():
            raise AssertionError("Outside range", too_low | too_high)
    return df
def within_n_std(df, n=3):
    """
    Assert that every value is within ``n`` standard
    deviations of its column's mean.

    Parameters
    ==========
    df : DataFrame
    n : int
        number of standard deviations from the mean

    Returns
    =======
    df : DataFrame
    """
    col_means = df.mean()
    col_stds = df.std()
    inside = np.abs(df[col_means.index] - col_means) < n * col_stds
    if np.all(inside):
        return df
    # Report the locations of the outliers.
    raise AssertionError(generic.bad_locations(~inside))
def has_dtypes(df, items):
    """
    Assert that a DataFrame has the expected ``dtypes``.

    Parameters
    ==========
    df : DataFrame
    items : dict
        mapping of columns to dtype.

    Returns
    =======
    df : DataFrame
    """
    actual = df.dtypes
    for col, expected in items.items():
        if actual[col] != expected:
            raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(col, expected, actual[col]))
    return df
def one_to_many(df, unitcol, manycol):
    """
    Assert that a many-to-one relationship is preserved between two
    columns. For example, a retail store will have distinct
    departments, each with several employees. If each employee may
    only work in a single department, then the relationship of the
    department to the employees is one to many.

    Parameters
    ==========
    df : DataFrame
    unitcol : str
        The column that encapsulates the groups in ``manycol``.
    manycol : str
        The column that must remain unique in the distinct pairs
        between ``manycol`` and ``unitcol``

    Returns
    =======
    df : DataFrame
    """
    # Distinct (manycol, unitcol) pairs; a manycol value appearing more
    # than once here maps to multiple unitcol values.
    pairs = df[[manycol, unitcol]].drop_duplicates()
    for value in pairs[manycol].unique():
        matches = pairs[pairs[manycol] == value]
        if len(matches) > 1:
            raise AssertionError("{} in {} has multiple values for {}".format(value, manycol, unitcol))
    return df
def is_same_as(df, df_to_compare, **kwargs):
    """
    Assert that two pandas DataFrames are equal.

    Parameters
    ==========
    df : pandas DataFrame
    df_to_compare : pandas DataFrame
    **kwargs : dict
        keyword arguments passed through to pandas' ``assert_frame_equal``

    Returns
    =======
    df : DataFrame

    Raises
    ======
    AssertionError
        If the frames differ; the underlying comparison failure is
        attached as the exception's cause.
    """
    try:
        # NOTE(review): ``tm`` is ``pandas.util.testing`` (module-level
        # import), which is deprecated in modern pandas in favour of
        # ``pandas.testing`` -- confirm before upgrading pandas.
        tm.assert_frame_equal(df, df_to_compare, **kwargs)
    except AssertionError as exc:
        # ``six.raise_from`` provides Python 2 compatible ``raise X from Y``.
        six.raise_from(AssertionError("DataFrames are not equal"), exc)
    return df
# Public API re-exported by ``from engarde.checks import *``.
# Fixed: the duplicate 'is_same_as' entry is removed and 'unique',
# which was missing from the export list, is added.
__all__ = ['has_dtypes', 'is_monotonic', 'is_same_as', 'is_shape',
           'none_missing', 'one_to_many', 'unique', 'unique_index',
           'verify', 'verify_all', 'verify_any', 'within_n_std',
           'within_range', 'within_set']
|
TomAugspurger/engarde
|
engarde/checks.py
|
unique
|
python
|
def unique(df, columns=None):
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
|
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L115-L135
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
    """
    Asserts that the DataFrame is of a known shape.

    Parameters
    ==========
    df : DataFrame
    shape : tuple
        (n_rows, n_columns). Use None or -1 if you don't care
        about a dimension.

    Returns
    =======
    df : DataFrame

    Raises
    ======
    AssertionError
        If any non-wildcard entry of ``shape`` differs from the
        corresponding entry of ``df.shape``.
    """
    try:
        # Per-dimension comparison with None/-1 wildcards. Avoids
        # ``np.equal(shape, [None, None])``, whose elementwise comparison
        # against None is deprecated in modern NumPy.
        check = all(want in (None, -1) or want == got
                    for want, got in zip(shape, df.shape))
        assert check
    except AssertionError as e:
        msg = ("Expected shape: {}\n"
               "\t\tActual shape: {}".format(shape, df.shape))
        e.args = (msg,)
        raise
    return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
unique_index
|
python
|
def unique_index(df):
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
|
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L138-L155
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
within_set
|
python
|
def within_set(df, items=None):
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
|
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L158-L177
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
within_range
|
python
|
def within_range(df, items=None):
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
|
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L179-L198
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to pandas' ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
within_n_std
|
python
|
def within_n_std(df, n=3):
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
|
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFrame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L200-L221
|
[
"def bad_locations(df):\n columns = df.columns\n all_locs = chain.from_iterable(zip(df.index, cycle([col])) for col in columns)\n bad = pd.Series(list(all_locs))[np.asarray(df).ravel(1)]\n msg = bad.values\n return msg\n"
] |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
has_dtypes
|
python
|
def has_dtypes(df, items):
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
|
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L223-L241
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
one_to_many
|
python
|
def one_to_many(df, unitcol, manycol):
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
|
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L244-L272
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def is_same_as(df, df_to_compare, **kwargs):
"""
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
"""
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/checks.py
|
is_same_as
|
python
|
def is_same_as(df, df_to_compare, **kwargs):
try:
tm.assert_frame_equal(df, df_to_compare, **kwargs)
except AssertionError as exc:
six.raise_from(AssertionError("DataFrames are not equal"), exc)
return df
|
Assert that two pandas dataframes are the equal
Parameters
==========
df : pandas DataFrame
df_to_compare : pandas DataFrame
**kwargs : dict
keyword arguments passed through to panda's ``assert_frame_equal``
Returns
=======
df : DataFrame
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/checks.py#L275-L295
| null |
# -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
"""
Asserts that there are no missing values (NaNs) in the DataFrame.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
try:
assert not df[columns].isnull().any().any()
except AssertionError as e:
missing = df[columns].isnull()
msg = generic.bad_locations(missing)
e.args = msg
raise
return df
def is_monotonic(df, items=None, increasing=None, strict=False):
"""
Asserts that the DataFrame is monotonic.
Parameters
==========
df : Series or DataFrame
items : dict
mapping columns to conditions (increasing, strict)
increasing : None or bool
None is either increasing or decreasing.
strict : whether the comparison should be strict
Returns
=======
df : DataFrame
"""
if items is None:
items = {k: (increasing, strict) for k in df}
for col, (increasing, strict) in items.items():
s = pd.Index(df[col])
if increasing:
good = getattr(s, 'is_monotonic_increasing')
elif increasing is None:
good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
else:
good = getattr(s, 'is_monotonic_decreasing')
if strict:
if increasing:
good = good & (s.to_series().diff().dropna() > 0).all()
elif increasing is None:
good = good & ((s.to_series().diff().dropna() > 0).all() |
(s.to_series().diff().dropna() < 0).all())
else:
good = good & (s.to_series().diff().dropna() < 0).all()
if not good:
raise AssertionError
return df
def is_shape(df, shape):
"""
Asserts that the DataFrame is of a known shape.
Parameters
==========
df : DataFrame
shape : tuple
(n_rows, n_columns). Use None or -1 if you don't care
about a dimension.
Returns
=======
df : DataFrame
"""
try:
check = np.all(np.equal(df.shape, shape) | (np.equal(shape, [-1, -1]) |
np.equal(shape, [None, None])))
assert check
except AssertionError as e:
msg = ("Expected shape: {}\n"
"\t\tActual shape: {}".format(shape, df.shape))
e.args = (msg,)
raise
return df
def unique(df, columns=None):
"""
Asserts that columns in the DataFrame only have unique values.
Parameters
----------
df : DataFrame
columns : list
list of columns to restrict the check to. If None, check all columns.
Returns
-------
df : DataFrame
same as the original
"""
if columns is None:
columns = df.columns
for col in columns:
if not df[col].is_unique:
raise AssertionError("Column {!r} contains non-unique values".format(col))
return df
def unique_index(df):
"""
Assert that the index is unique
Parameters
==========
df : DataFrame
Returns
=======
df : DataFrame
"""
try:
assert df.index.is_unique
except AssertionError as e:
e.args = df.index.get_duplicates()
raise
return df
def within_set(df, items=None):
"""
Assert that df is a subset of items
Parameters
==========
df : DataFrame
items : dict
mapping of columns (k) to array-like of values (v) that
``df[k]`` is expected to be a subset of
Returns
=======
df : DataFrame
"""
for k, v in items.items():
if not df[k].isin(v).all():
bad = df.loc[~df[k].isin(v), k]
raise AssertionError('Not in set', bad)
return df
def within_range(df, items=None):
"""
Assert that a DataFrame is within a range.
Parameters
==========
df : DataFame
items : dict
mapping of columns (k) to a (low, high) tuple (v)
that ``df[k]`` is expected to be between.
Returns
=======
df : DataFrame
"""
for k, (lower, upper) in items.items():
if (lower > df[k]).any() or (upper < df[k]).any():
bad = (lower > df[k]) | (upper < df[k])
raise AssertionError("Outside range", bad)
return df
def within_n_std(df, n=3):
"""
Assert that every value is within ``n`` standard
deviations of its column's mean.
Parameters
==========
df : DataFame
n : int
number of standard deviations from the mean
Returns
=======
df : DataFrame
"""
means = df.mean()
stds = df.std()
inliers = (np.abs(df[means.index] - means) < n * stds)
if not np.all(inliers):
msg = generic.bad_locations(~inliers)
raise AssertionError(msg)
return df
def has_dtypes(df, items):
"""
Assert that a DataFrame has ``dtypes``
Parameters
==========
df: DataFrame
items: dict
mapping of columns to dtype.
Returns
=======
df : DataFrame
"""
dtypes = df.dtypes
for k, v in items.items():
if not dtypes[k] == v:
raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(k, v,dtypes[k]))
return df
def one_to_many(df, unitcol, manycol):
"""
Assert that a many-to-one relationship is preserved between two
columns. For example, a retail store will have have distinct
departments, each with several employees. If each employee may
only work in a single department, then the relationship of the
department to the employees is one to many.
Parameters
==========
df : DataFrame
unitcol : str
The column that encapulates the groups in ``manycol``.
manycol : str
The column that must remain unique in the distict pairs
between ``manycol`` and ``unitcol``
Returns
=======
df : DataFrame
"""
subset = df[[manycol, unitcol]].drop_duplicates()
for many in subset[manycol].unique():
if subset[subset[manycol] == many].shape[0] > 1:
msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol)
raise AssertionError(msg)
return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
|
TomAugspurger/engarde
|
engarde/generic.py
|
verify
|
python
|
def verify(df, check, *args, **kwargs):
result = check(df, *args, **kwargs)
try:
assert result
except AssertionError as e:
msg = '{} is not true'.format(check.__name__)
e.args = (msg, df)
raise
return df
|
Generic verify. Assert that ``check(df, *args, **kwargs)`` is
true.
Parameters
==========
df : DataFrame
check : function
Should take DataFrame and **kwargs. Returns bool
Returns
=======
df : DataFrame
same as the input.
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/generic.py#L15-L38
|
[
"f = lambda x, n: len(x) > n\n"
] |
# -*- coding: utf-8 -*-
"""
Module for useful generic functions.
"""
from itertools import chain, cycle
import numpy as np
import pandas as pd
# --------------
# Generic verify
# --------------
def verify_all(df, check, *args, **kwargs):
"""
Verify that all the entries in ``check(df, *args, **kwargs)``
are true.
"""
result = check(df, *args, **kwargs)
try:
assert np.all(result)
except AssertionError as e:
msg = "{} not true for all".format(check.__name__)
e.args = (msg, df[~result])
raise
return df
def verify_any(df, check, *args, **kwargs):
"""
Verify that any of the entries in ``check(df, *args, **kwargs)``
is true
"""
result = check(df, *args, **kwargs)
try:
assert np.any(result)
except AssertionError as e:
msg = '{} not true for any'.format(check.__name__)
e.args = (msg, df)
raise
return df
# ---------------
# Error reporting
# ---------------
def bad_locations(df):
columns = df.columns
all_locs = chain.from_iterable(zip(df.index, cycle([col])) for col in columns)
bad = pd.Series(list(all_locs))[np.asarray(df).ravel(1)]
msg = bad.values
return msg
__all__ = ['verify', 'verify_all', 'verify_any', 'bad_locations']
|
TomAugspurger/engarde
|
engarde/generic.py
|
verify_all
|
python
|
def verify_all(df, check, *args, **kwargs):
result = check(df, *args, **kwargs)
try:
assert np.all(result)
except AssertionError as e:
msg = "{} not true for all".format(check.__name__)
e.args = (msg, df[~result])
raise
return df
|
Verify that all the entries in ``check(df, *args, **kwargs)``
are true.
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/generic.py#L40-L52
|
[
"f = lambda x, n: x > n\n"
] |
# -*- coding: utf-8 -*-
"""
Module for useful generic functions.
"""
from itertools import chain, cycle
import numpy as np
import pandas as pd
# --------------
# Generic verify
# --------------
def verify(df, check, *args, **kwargs):
"""
Generic verify. Assert that ``check(df, *args, **kwargs)`` is
true.
Parameters
==========
df : DataFrame
check : function
Should take DataFrame and **kwargs. Returns bool
Returns
=======
df : DataFrame
same as the input.
"""
result = check(df, *args, **kwargs)
try:
assert result
except AssertionError as e:
msg = '{} is not true'.format(check.__name__)
e.args = (msg, df)
raise
return df
def verify_any(df, check, *args, **kwargs):
"""
Verify that any of the entries in ``check(df, *args, **kwargs)``
is true
"""
result = check(df, *args, **kwargs)
try:
assert np.any(result)
except AssertionError as e:
msg = '{} not true for any'.format(check.__name__)
e.args = (msg, df)
raise
return df
# ---------------
# Error reporting
# ---------------
def bad_locations(df):
columns = df.columns
all_locs = chain.from_iterable(zip(df.index, cycle([col])) for col in columns)
bad = pd.Series(list(all_locs))[np.asarray(df).ravel(1)]
msg = bad.values
return msg
__all__ = ['verify', 'verify_all', 'verify_any', 'bad_locations']
|
TomAugspurger/engarde
|
engarde/generic.py
|
verify_any
|
python
|
def verify_any(df, check, *args, **kwargs):
result = check(df, *args, **kwargs)
try:
assert np.any(result)
except AssertionError as e:
msg = '{} not true for any'.format(check.__name__)
e.args = (msg, df)
raise
return df
|
Verify that any of the entries in ``check(df, *args, **kwargs)``
is true
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/engarde/generic.py#L54-L66
|
[
"f = lambda x, n: x > n\n"
] |
# -*- coding: utf-8 -*-
"""
Module for useful generic functions.
"""
from itertools import chain, cycle
import numpy as np
import pandas as pd
# --------------
# Generic verify
# --------------
def verify(df, check, *args, **kwargs):
"""
Generic verify. Assert that ``check(df, *args, **kwargs)`` is
true.
Parameters
==========
df : DataFrame
check : function
Should take DataFrame and **kwargs. Returns bool
Returns
=======
df : DataFrame
same as the input.
"""
result = check(df, *args, **kwargs)
try:
assert result
except AssertionError as e:
msg = '{} is not true'.format(check.__name__)
e.args = (msg, df)
raise
return df
def verify_all(df, check, *args, **kwargs):
"""
Verify that all the entries in ``check(df, *args, **kwargs)``
are true.
"""
result = check(df, *args, **kwargs)
try:
assert np.all(result)
except AssertionError as e:
msg = "{} not true for all".format(check.__name__)
e.args = (msg, df[~result])
raise
return df
# ---------------
# Error reporting
# ---------------
def bad_locations(df):
columns = df.columns
all_locs = chain.from_iterable(zip(df.index, cycle([col])) for col in columns)
bad = pd.Series(list(all_locs))[np.asarray(df).ravel(1)]
msg = bad.values
return msg
__all__ = ['verify', 'verify_all', 'verify_any', 'bad_locations']
|
TomAugspurger/engarde
|
docs/sphinxext/ipython_directive.py
|
EmbeddedSphinxShell.process_input
|
python
|
def process_input(self, data, input_prompt, lineno):
decorator, input, rest = data
image_file = None
image_directive = None
is_verbatim = decorator=='@verbatim' or self.is_verbatim
is_doctest = (decorator is not None and \
decorator.startswith('@doctest')) or self.is_doctest
is_suppress = decorator=='@suppress' or self.is_suppress
is_okexcept = decorator=='@okexcept' or self.is_okexcept
is_okwarning = decorator=='@okwarning' or self.is_okwarning
is_savefig = decorator is not None and \
decorator.startswith('@savefig')
# set the encodings to be used by DecodingStringIO
# to convert the execution output into unicode if
# needed. this attrib is set by IpythonDirective.run()
# based on the specified block options, defaulting to ['ut
self.cout.set_encodings(self.output_encoding)
input_lines = input.split('\n')
if len(input_lines) > 1:
if input_lines[-1] != "":
input_lines.append('') # make sure there's a blank line
# so splitter buffer gets reset
continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
if is_savefig:
image_file, image_directive = self.process_image(decorator)
ret = []
is_semicolon = False
# Hold the execution count, if requested to do so.
if is_suppress and self.hold_count:
store_history = False
else:
store_history = True
# Note: catch_warnings is not thread safe
with warnings.catch_warnings(record=True) as ws:
for i, line in enumerate(input_lines):
if line.endswith(';'):
is_semicolon = True
if i == 0:
# process the first input line
if is_verbatim:
self.process_input_line('')
self.IP.execution_count += 1 # increment it anyway
else:
# only submit the line in non-verbatim mode
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(input_prompt, line)
else:
# process a continuation line
if not is_verbatim:
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(continuation, line)
if not is_suppress:
ret.append(formatted_line)
if not is_suppress and len(rest.strip()) and is_verbatim:
# the "rest" is the standard output of the
# input, which needs to be added in
# verbatim mode
ret.append(rest)
self.cout.seek(0)
output = self.cout.read()
if not is_suppress and not is_semicolon:
ret.append(output)
elif is_semicolon: # get spacing right
ret.append('')
# context information
filename = self.state.document.current_source
lineno = self.state.document.current_line
# output any exceptions raised during execution to stdout
# unless :okexcept: has been specified.
if not is_okexcept and "Traceback" in output:
s = "\nException in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write(output)
sys.stdout.write('<<<' + ('-' * 73) + '\n\n')
# output any warning raised during execution to stdout
# unless :okwarning: has been specified.
if not is_okwarning:
for w in ws:
s = "\nWarning in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write('-' * 76 + '\n')
s=warnings.formatwarning(w.message, w.category,
w.filename, w.lineno, w.line)
sys.stdout.write(s)
sys.stdout.write('<<<' + ('-' * 73) + '\n')
self.cout.truncate(0)
return (ret, input_lines, output, is_doctest, decorator, image_file,
image_directive)
|
Process data block for INPUT token.
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/docs/sphinxext/ipython_directive.py#L386-L499
| null |
class EmbeddedSphinxShell(object):
"""An embedded IPython instance to run inside Sphinx"""
def __init__(self, exec_lines=None,state=None):
self.cout = DecodingStringIO(u'')
if exec_lines is None:
exec_lines = []
self.state = state
# Create config object for IPython
config = Config()
config.InteractiveShell.autocall = False
config.InteractiveShell.autoindent = False
config.InteractiveShell.colors = 'NoColor'
# create a profile so instance history isn't saved
tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
profname = 'auto_profile_sphinx_build'
pdir = os.path.join(tmp_profile_dir,profname)
profile = ProfileDir.create_profile_dir(pdir)
# Create and initialize global ipython, but don't start its mainloop.
# This will persist across different EmbededSphinxShell instances.
IP = InteractiveShell.instance(config=config, profile_dir=profile)
# io.stdout redirect must be done after instantiating InteractiveShell
io.stdout = self.cout
io.stderr = self.cout
# For debugging, so we can see normal output, use this:
#from IPython.utils.io import Tee
#io.stdout = Tee(self.cout, channel='stdout') # dbg
#io.stderr = Tee(self.cout, channel='stderr') # dbg
# Store a few parts of IPython we'll need.
self.IP = IP
self.user_ns = self.IP.user_ns
self.user_global_ns = self.IP.user_global_ns
self.input = ''
self.output = ''
self.is_verbatim = False
self.is_doctest = False
self.is_suppress = False
# Optionally, provide more detailed information to shell.
self.directive = None
# on the first call to the savefig decorator, we'll import
# pyplot as plt so we can make a call to the plt.gcf().savefig
self._pyplot_imported = False
# Prepopulate the namespace.
for line in exec_lines:
self.process_input_line(line, store_history=False)
def clear_cout(self):
self.cout.seek(0)
self.cout.truncate(0)
def process_input_line(self, line, store_history=True):
"""process the input, capturing stdout"""
stdout = sys.stdout
splitter = self.IP.input_splitter
try:
sys.stdout = self.cout
splitter.push(line)
more = splitter.push_accepts_more()
if not more:
try:
source_raw = splitter.source_raw_reset()[1]
except:
# recent ipython #4504
source_raw = splitter.raw_reset()
self.IP.run_cell(source_raw, store_history=store_history)
finally:
sys.stdout = stdout
def process_image(self, decorator):
"""
# build out an image directive like
# .. image:: somefile.png
# :width 4in
#
# from an input like
# savefig somefile.png width=4in
"""
savefig_dir = self.savefig_dir
source_dir = self.source_dir
saveargs = decorator.split(' ')
filename = saveargs[1]
# insert relative path to image file in source
outfile = os.path.relpath(os.path.join(savefig_dir,filename),
source_dir)
imagerows = ['.. image:: %s'%outfile]
for kwarg in saveargs[2:]:
arg, val = kwarg.split('=')
arg = arg.strip()
val = val.strip()
imagerows.append(' :%s: %s'%(arg, val))
image_file = os.path.basename(outfile) # only return file name
image_directive = '\n'.join(imagerows)
return image_file, image_directive
# Callbacks for each type of token
def process_output(self, data, output_prompt,
input_lines, output, is_doctest, decorator, image_file):
"""
Process data block for OUTPUT token.
"""
TAB = ' ' * 4
if is_doctest and output is not None:
found = output
found = found.strip()
submitted = data.strip()
if self.directive is None:
source = 'Unavailable'
content = 'Unavailable'
else:
source = self.directive.state.document.current_source
content = self.directive.content
# Add tabs and join into a single string.
content = '\n'.join([TAB + line for line in content])
# Make sure the output contains the output prompt.
ind = found.find(output_prompt)
if ind < 0:
e = ('output does not contain output prompt\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'Input line(s):\n{TAB}{2}\n\n'
'Output line(s):\n{TAB}{3}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), TAB=TAB)
raise RuntimeError(e)
found = found[len(output_prompt):].strip()
# Handle the actual doctest comparison.
if decorator.strip() == '@doctest':
# Standard doctest
if found != submitted:
e = ('doctest failure\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'On input line(s):\n{TAB}{2}\n\n'
'we found output:\n{TAB}{3}\n\n'
'instead of the expected:\n{TAB}{4}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), repr(submitted), TAB=TAB)
raise RuntimeError(e)
else:
self.custom_doctest(decorator, input_lines, found, submitted)
def process_comment(self, data):
"""Process data fPblock for COMMENT token."""
if not self.is_suppress:
return [data]
def save_image(self, image_file):
"""
Saves the image file to disk.
"""
self.ensure_pyplot()
command = ('plt.gcf().savefig("%s", bbox_inches="tight", '
'dpi=100)' % image_file)
#print 'SAVEFIG', command # dbg
self.process_input_line('bookmark ipy_thisdir', store_history=False)
self.process_input_line('cd -b ipy_savedir', store_history=False)
self.process_input_line(command, store_history=False)
self.process_input_line('cd -b ipy_thisdir', store_history=False)
self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
self.clear_cout()
def process_block(self, block):
"""
process block from the block_parser and return a list of processed lines
"""
ret = []
output = None
input_lines = None
lineno = self.IP.execution_count
input_prompt = self.promptin % lineno
output_prompt = self.promptout % lineno
image_file = None
image_directive = None
for token, data in block:
if token == COMMENT:
out_data = self.process_comment(data)
elif token == INPUT:
(out_data, input_lines, output, is_doctest, decorator,
image_file, image_directive) = \
self.process_input(data, input_prompt, lineno)
elif token == OUTPUT:
out_data = \
self.process_output(data, output_prompt,
input_lines, output, is_doctest,
decorator, image_file)
if out_data:
ret.extend(out_data)
# save the image files
if image_file is not None:
self.save_image(image_file)
return ret, image_directive
def ensure_pyplot(self):
"""
Ensures that pyplot has been imported into the embedded IPython shell.
Also, makes sure to set the backend appropriately if not set already.
"""
# We are here if the @figure pseudo decorator was used. Thus, it's
# possible that we could be here even if python_mplbackend were set to
# `None`. That's also strange and perhaps worthy of raising an
# exception, but for now, we just set the backend to 'agg'.
if not self._pyplot_imported:
if 'matplotlib.backends' not in sys.modules:
# Then ipython_matplotlib was set to None but there was a
# call to the @figure decorator (and ipython_execlines did
# not set a backend).
#raise Exception("No backend was set, but @figure was used!")
import matplotlib
matplotlib.use('agg')
# Always import pyplot into embedded shell.
self.process_input_line('import matplotlib.pyplot as plt',
store_history=False)
self._pyplot_imported = True
def process_pure_python(self, content):
"""
content is a list of strings. it is unedited directive content
This runs it line by line in the InteractiveShell, prepends
prompts as needed capturing stderr and stdout, then returns
the content as a list as if it were ipython code
"""
output = []
savefig = False # keep up with this to clear figure
multiline = False # to handle line continuation
multiline_start = None
fmtin = self.promptin
ct = 0
for lineno, line in enumerate(content):
line_stripped = line.strip()
if not len(line):
output.append(line)
continue
# handle decorators
if line_stripped.startswith('@'):
output.extend([line])
if 'savefig' in line:
savefig = True # and need to clear figure
continue
# handle comments
if line_stripped.startswith('#'):
output.extend([line])
continue
# deal with lines checking for multiline
continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
if not multiline:
modified = u"%s %s" % (fmtin % ct, line_stripped)
output.append(modified)
ct += 1
try:
ast.parse(line_stripped)
output.append(u'')
except Exception: # on a multiline
multiline = True
multiline_start = lineno
else: # still on a multiline
modified = u'%s %s' % (continuation, line)
output.append(modified)
# if the next line is indented, it should be part of multiline
if len(content) > lineno + 1:
nextline = content[lineno + 1]
if len(nextline) - len(nextline.lstrip()) > 3:
continue
try:
mod = ast.parse(
'\n'.join(content[multiline_start:lineno+1]))
if isinstance(mod.body[0], ast.FunctionDef):
# check to see if we have the whole function
for element in mod.body[0].body:
if isinstance(element, ast.Return):
multiline = False
else:
output.append(u'')
multiline = False
except Exception:
pass
if savefig: # clear figure if plotted
self.ensure_pyplot()
self.process_input_line('plt.clf()', store_history=False)
self.clear_cout()
savefig = False
return output
def custom_doctest(self, decorator, input_lines, found, submitted):
"""
Perform a specialized doctest.
"""
from .custom_doctests import doctests
args = decorator.split()
doctest_type = args[1]
if doctest_type in doctests:
doctests[doctest_type](self, args, input_lines, found, submitted)
else:
e = "Invalid option to @doctest: {0}".format(doctest_type)
raise Exception(e)
|
TomAugspurger/engarde
|
docs/sphinxext/ipython_directive.py
|
EmbeddedSphinxShell.process_output
|
python
|
def process_output(self, data, output_prompt,
input_lines, output, is_doctest, decorator, image_file):
TAB = ' ' * 4
if is_doctest and output is not None:
found = output
found = found.strip()
submitted = data.strip()
if self.directive is None:
source = 'Unavailable'
content = 'Unavailable'
else:
source = self.directive.state.document.current_source
content = self.directive.content
# Add tabs and join into a single string.
content = '\n'.join([TAB + line for line in content])
# Make sure the output contains the output prompt.
ind = found.find(output_prompt)
if ind < 0:
e = ('output does not contain output prompt\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'Input line(s):\n{TAB}{2}\n\n'
'Output line(s):\n{TAB}{3}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), TAB=TAB)
raise RuntimeError(e)
found = found[len(output_prompt):].strip()
# Handle the actual doctest comparison.
if decorator.strip() == '@doctest':
# Standard doctest
if found != submitted:
e = ('doctest failure\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'On input line(s):\n{TAB}{2}\n\n'
'we found output:\n{TAB}{3}\n\n'
'instead of the expected:\n{TAB}{4}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), repr(submitted), TAB=TAB)
raise RuntimeError(e)
else:
self.custom_doctest(decorator, input_lines, found, submitted)
|
Process data block for OUTPUT token.
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/docs/sphinxext/ipython_directive.py#L502-L552
|
[
"def custom_doctest(self, decorator, input_lines, found, submitted):\n \"\"\"\n Perform a specialized doctest.\n\n \"\"\"\n from .custom_doctests import doctests\n\n args = decorator.split()\n doctest_type = args[1]\n if doctest_type in doctests:\n doctests[doctest_type](self, args, input_lines, found, submitted)\n else:\n e = \"Invalid option to @doctest: {0}\".format(doctest_type)\n raise Exception(e)\n"
] |
class EmbeddedSphinxShell(object):
"""An embedded IPython instance to run inside Sphinx"""
def __init__(self, exec_lines=None,state=None):
self.cout = DecodingStringIO(u'')
if exec_lines is None:
exec_lines = []
self.state = state
# Create config object for IPython
config = Config()
config.InteractiveShell.autocall = False
config.InteractiveShell.autoindent = False
config.InteractiveShell.colors = 'NoColor'
# create a profile so instance history isn't saved
tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
profname = 'auto_profile_sphinx_build'
pdir = os.path.join(tmp_profile_dir,profname)
profile = ProfileDir.create_profile_dir(pdir)
# Create and initialize global ipython, but don't start its mainloop.
# This will persist across different EmbededSphinxShell instances.
IP = InteractiveShell.instance(config=config, profile_dir=profile)
# io.stdout redirect must be done after instantiating InteractiveShell
io.stdout = self.cout
io.stderr = self.cout
# For debugging, so we can see normal output, use this:
#from IPython.utils.io import Tee
#io.stdout = Tee(self.cout, channel='stdout') # dbg
#io.stderr = Tee(self.cout, channel='stderr') # dbg
# Store a few parts of IPython we'll need.
self.IP = IP
self.user_ns = self.IP.user_ns
self.user_global_ns = self.IP.user_global_ns
self.input = ''
self.output = ''
self.is_verbatim = False
self.is_doctest = False
self.is_suppress = False
# Optionally, provide more detailed information to shell.
self.directive = None
# on the first call to the savefig decorator, we'll import
# pyplot as plt so we can make a call to the plt.gcf().savefig
self._pyplot_imported = False
# Prepopulate the namespace.
for line in exec_lines:
self.process_input_line(line, store_history=False)
def clear_cout(self):
self.cout.seek(0)
self.cout.truncate(0)
def process_input_line(self, line, store_history=True):
"""process the input, capturing stdout"""
stdout = sys.stdout
splitter = self.IP.input_splitter
try:
sys.stdout = self.cout
splitter.push(line)
more = splitter.push_accepts_more()
if not more:
try:
source_raw = splitter.source_raw_reset()[1]
except:
# recent ipython #4504
source_raw = splitter.raw_reset()
self.IP.run_cell(source_raw, store_history=store_history)
finally:
sys.stdout = stdout
def process_image(self, decorator):
"""
# build out an image directive like
# .. image:: somefile.png
# :width 4in
#
# from an input like
# savefig somefile.png width=4in
"""
savefig_dir = self.savefig_dir
source_dir = self.source_dir
saveargs = decorator.split(' ')
filename = saveargs[1]
# insert relative path to image file in source
outfile = os.path.relpath(os.path.join(savefig_dir,filename),
source_dir)
imagerows = ['.. image:: %s'%outfile]
for kwarg in saveargs[2:]:
arg, val = kwarg.split('=')
arg = arg.strip()
val = val.strip()
imagerows.append(' :%s: %s'%(arg, val))
image_file = os.path.basename(outfile) # only return file name
image_directive = '\n'.join(imagerows)
return image_file, image_directive
# Callbacks for each type of token
def process_input(self, data, input_prompt, lineno):
"""
Process data block for INPUT token.
"""
decorator, input, rest = data
image_file = None
image_directive = None
is_verbatim = decorator=='@verbatim' or self.is_verbatim
is_doctest = (decorator is not None and \
decorator.startswith('@doctest')) or self.is_doctest
is_suppress = decorator=='@suppress' or self.is_suppress
is_okexcept = decorator=='@okexcept' or self.is_okexcept
is_okwarning = decorator=='@okwarning' or self.is_okwarning
is_savefig = decorator is not None and \
decorator.startswith('@savefig')
# set the encodings to be used by DecodingStringIO
# to convert the execution output into unicode if
# needed. this attrib is set by IpythonDirective.run()
# based on the specified block options, defaulting to ['ut
self.cout.set_encodings(self.output_encoding)
input_lines = input.split('\n')
if len(input_lines) > 1:
if input_lines[-1] != "":
input_lines.append('') # make sure there's a blank line
# so splitter buffer gets reset
continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
if is_savefig:
image_file, image_directive = self.process_image(decorator)
ret = []
is_semicolon = False
# Hold the execution count, if requested to do so.
if is_suppress and self.hold_count:
store_history = False
else:
store_history = True
# Note: catch_warnings is not thread safe
with warnings.catch_warnings(record=True) as ws:
for i, line in enumerate(input_lines):
if line.endswith(';'):
is_semicolon = True
if i == 0:
# process the first input line
if is_verbatim:
self.process_input_line('')
self.IP.execution_count += 1 # increment it anyway
else:
# only submit the line in non-verbatim mode
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(input_prompt, line)
else:
# process a continuation line
if not is_verbatim:
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(continuation, line)
if not is_suppress:
ret.append(formatted_line)
if not is_suppress and len(rest.strip()) and is_verbatim:
# the "rest" is the standard output of the
# input, which needs to be added in
# verbatim mode
ret.append(rest)
self.cout.seek(0)
output = self.cout.read()
if not is_suppress and not is_semicolon:
ret.append(output)
elif is_semicolon: # get spacing right
ret.append('')
# context information
filename = self.state.document.current_source
lineno = self.state.document.current_line
# output any exceptions raised during execution to stdout
# unless :okexcept: has been specified.
if not is_okexcept and "Traceback" in output:
s = "\nException in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write(output)
sys.stdout.write('<<<' + ('-' * 73) + '\n\n')
# output any warning raised during execution to stdout
# unless :okwarning: has been specified.
if not is_okwarning:
for w in ws:
s = "\nWarning in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write('-' * 76 + '\n')
s=warnings.formatwarning(w.message, w.category,
w.filename, w.lineno, w.line)
sys.stdout.write(s)
sys.stdout.write('<<<' + ('-' * 73) + '\n')
self.cout.truncate(0)
return (ret, input_lines, output, is_doctest, decorator, image_file,
image_directive)
def process_comment(self, data):
"""Process data fPblock for COMMENT token."""
if not self.is_suppress:
return [data]
def save_image(self, image_file):
"""
Saves the image file to disk.
"""
self.ensure_pyplot()
command = ('plt.gcf().savefig("%s", bbox_inches="tight", '
'dpi=100)' % image_file)
#print 'SAVEFIG', command # dbg
self.process_input_line('bookmark ipy_thisdir', store_history=False)
self.process_input_line('cd -b ipy_savedir', store_history=False)
self.process_input_line(command, store_history=False)
self.process_input_line('cd -b ipy_thisdir', store_history=False)
self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
self.clear_cout()
def process_block(self, block):
"""
process block from the block_parser and return a list of processed lines
"""
ret = []
output = None
input_lines = None
lineno = self.IP.execution_count
input_prompt = self.promptin % lineno
output_prompt = self.promptout % lineno
image_file = None
image_directive = None
for token, data in block:
if token == COMMENT:
out_data = self.process_comment(data)
elif token == INPUT:
(out_data, input_lines, output, is_doctest, decorator,
image_file, image_directive) = \
self.process_input(data, input_prompt, lineno)
elif token == OUTPUT:
out_data = \
self.process_output(data, output_prompt,
input_lines, output, is_doctest,
decorator, image_file)
if out_data:
ret.extend(out_data)
# save the image files
if image_file is not None:
self.save_image(image_file)
return ret, image_directive
def ensure_pyplot(self):
"""
Ensures that pyplot has been imported into the embedded IPython shell.
Also, makes sure to set the backend appropriately if not set already.
"""
# We are here if the @figure pseudo decorator was used. Thus, it's
# possible that we could be here even if python_mplbackend were set to
# `None`. That's also strange and perhaps worthy of raising an
# exception, but for now, we just set the backend to 'agg'.
if not self._pyplot_imported:
if 'matplotlib.backends' not in sys.modules:
# Then ipython_matplotlib was set to None but there was a
# call to the @figure decorator (and ipython_execlines did
# not set a backend).
#raise Exception("No backend was set, but @figure was used!")
import matplotlib
matplotlib.use('agg')
# Always import pyplot into embedded shell.
self.process_input_line('import matplotlib.pyplot as plt',
store_history=False)
self._pyplot_imported = True
def process_pure_python(self, content):
"""
content is a list of strings. it is unedited directive content
This runs it line by line in the InteractiveShell, prepends
prompts as needed capturing stderr and stdout, then returns
the content as a list as if it were ipython code
"""
output = []
savefig = False # keep up with this to clear figure
multiline = False # to handle line continuation
multiline_start = None
fmtin = self.promptin
ct = 0
for lineno, line in enumerate(content):
line_stripped = line.strip()
if not len(line):
output.append(line)
continue
# handle decorators
if line_stripped.startswith('@'):
output.extend([line])
if 'savefig' in line:
savefig = True # and need to clear figure
continue
# handle comments
if line_stripped.startswith('#'):
output.extend([line])
continue
# deal with lines checking for multiline
continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
if not multiline:
modified = u"%s %s" % (fmtin % ct, line_stripped)
output.append(modified)
ct += 1
try:
ast.parse(line_stripped)
output.append(u'')
except Exception: # on a multiline
multiline = True
multiline_start = lineno
else: # still on a multiline
modified = u'%s %s' % (continuation, line)
output.append(modified)
# if the next line is indented, it should be part of multiline
if len(content) > lineno + 1:
nextline = content[lineno + 1]
if len(nextline) - len(nextline.lstrip()) > 3:
continue
try:
mod = ast.parse(
'\n'.join(content[multiline_start:lineno+1]))
if isinstance(mod.body[0], ast.FunctionDef):
# check to see if we have the whole function
for element in mod.body[0].body:
if isinstance(element, ast.Return):
multiline = False
else:
output.append(u'')
multiline = False
except Exception:
pass
if savefig: # clear figure if plotted
self.ensure_pyplot()
self.process_input_line('plt.clf()', store_history=False)
self.clear_cout()
savefig = False
return output
def custom_doctest(self, decorator, input_lines, found, submitted):
"""
Perform a specialized doctest.
"""
from .custom_doctests import doctests
args = decorator.split()
doctest_type = args[1]
if doctest_type in doctests:
doctests[doctest_type](self, args, input_lines, found, submitted)
else:
e = "Invalid option to @doctest: {0}".format(doctest_type)
raise Exception(e)
|
TomAugspurger/engarde
|
docs/sphinxext/ipython_directive.py
|
EmbeddedSphinxShell.process_block
|
python
|
def process_block(self, block):
ret = []
output = None
input_lines = None
lineno = self.IP.execution_count
input_prompt = self.promptin % lineno
output_prompt = self.promptout % lineno
image_file = None
image_directive = None
for token, data in block:
if token == COMMENT:
out_data = self.process_comment(data)
elif token == INPUT:
(out_data, input_lines, output, is_doctest, decorator,
image_file, image_directive) = \
self.process_input(data, input_prompt, lineno)
elif token == OUTPUT:
out_data = \
self.process_output(data, output_prompt,
input_lines, output, is_doctest,
decorator, image_file)
if out_data:
ret.extend(out_data)
# save the image files
if image_file is not None:
self.save_image(image_file)
return ret, image_directive
|
process block from the block_parser and return a list of processed lines
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/docs/sphinxext/ipython_directive.py#L575-L608
|
[
"def process_input(self, data, input_prompt, lineno):\n \"\"\"\n Process data block for INPUT token.\n\n \"\"\"\n decorator, input, rest = data\n image_file = None\n image_directive = None\n\n is_verbatim = decorator=='@verbatim' or self.is_verbatim\n is_doctest = (decorator is not None and \\\n decorator.startswith('@doctest')) or self.is_doctest\n is_suppress = decorator=='@suppress' or self.is_suppress\n is_okexcept = decorator=='@okexcept' or self.is_okexcept\n is_okwarning = decorator=='@okwarning' or self.is_okwarning\n is_savefig = decorator is not None and \\\n decorator.startswith('@savefig')\n\n # set the encodings to be used by DecodingStringIO\n # to convert the execution output into unicode if\n # needed. this attrib is set by IpythonDirective.run()\n # based on the specified block options, defaulting to ['ut\n self.cout.set_encodings(self.output_encoding)\n\n input_lines = input.split('\\n')\n\n if len(input_lines) > 1:\n if input_lines[-1] != \"\":\n input_lines.append('') # make sure there's a blank line\n # so splitter buffer gets reset\n\n continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))\n\n if is_savefig:\n image_file, image_directive = self.process_image(decorator)\n\n ret = []\n is_semicolon = False\n\n # Hold the execution count, if requested to do so.\n if is_suppress and self.hold_count:\n store_history = False\n else:\n store_history = True\n\n # Note: catch_warnings is not thread safe\n with warnings.catch_warnings(record=True) as ws:\n for i, line in enumerate(input_lines):\n if line.endswith(';'):\n is_semicolon = True\n\n if i == 0:\n # process the first input line\n if is_verbatim:\n self.process_input_line('')\n self.IP.execution_count += 1 # increment it anyway\n else:\n # only submit the line in non-verbatim mode\n self.process_input_line(line, store_history=store_history)\n formatted_line = '%s %s'%(input_prompt, line)\n else:\n # process a continuation line\n if not is_verbatim:\n self.process_input_line(line, 
store_history=store_history)\n\n formatted_line = '%s %s'%(continuation, line)\n\n if not is_suppress:\n ret.append(formatted_line)\n\n if not is_suppress and len(rest.strip()) and is_verbatim:\n # the \"rest\" is the standard output of the\n # input, which needs to be added in\n # verbatim mode\n ret.append(rest)\n\n self.cout.seek(0)\n output = self.cout.read()\n if not is_suppress and not is_semicolon:\n ret.append(output)\n elif is_semicolon: # get spacing right\n ret.append('')\n\n # context information\n filename = self.state.document.current_source\n lineno = self.state.document.current_line\n\n # output any exceptions raised during execution to stdout\n # unless :okexcept: has been specified.\n if not is_okexcept and \"Traceback\" in output:\n s = \"\\nException in %s at block ending on line %s\\n\" % (filename, lineno)\n s += \"Specify :okexcept: as an option in the ipython:: block to suppress this message\\n\"\n sys.stdout.write('\\n\\n>>>' + ('-' * 73))\n sys.stdout.write(s)\n sys.stdout.write(output)\n sys.stdout.write('<<<' + ('-' * 73) + '\\n\\n')\n\n # output any warning raised during execution to stdout\n # unless :okwarning: has been specified.\n if not is_okwarning:\n for w in ws:\n s = \"\\nWarning in %s at block ending on line %s\\n\" % (filename, lineno)\n s += \"Specify :okwarning: as an option in the ipython:: block to suppress this message\\n\"\n sys.stdout.write('\\n\\n>>>' + ('-' * 73))\n sys.stdout.write(s)\n sys.stdout.write('-' * 76 + '\\n')\n s=warnings.formatwarning(w.message, w.category,\n w.filename, w.lineno, w.line)\n sys.stdout.write(s)\n sys.stdout.write('<<<' + ('-' * 73) + '\\n')\n\n self.cout.truncate(0)\n return (ret, input_lines, output, is_doctest, decorator, image_file,\n image_directive)\n",
"def process_output(self, data, output_prompt,\n input_lines, output, is_doctest, decorator, image_file):\n \"\"\"\n Process data block for OUTPUT token.\n\n \"\"\"\n TAB = ' ' * 4\n\n if is_doctest and output is not None:\n\n found = output\n found = found.strip()\n submitted = data.strip()\n\n if self.directive is None:\n source = 'Unavailable'\n content = 'Unavailable'\n else:\n source = self.directive.state.document.current_source\n content = self.directive.content\n # Add tabs and join into a single string.\n content = '\\n'.join([TAB + line for line in content])\n\n # Make sure the output contains the output prompt.\n ind = found.find(output_prompt)\n if ind < 0:\n e = ('output does not contain output prompt\\n\\n'\n 'Document source: {0}\\n\\n'\n 'Raw content: \\n{1}\\n\\n'\n 'Input line(s):\\n{TAB}{2}\\n\\n'\n 'Output line(s):\\n{TAB}{3}\\n\\n')\n e = e.format(source, content, '\\n'.join(input_lines),\n repr(found), TAB=TAB)\n raise RuntimeError(e)\n found = found[len(output_prompt):].strip()\n\n # Handle the actual doctest comparison.\n if decorator.strip() == '@doctest':\n # Standard doctest\n if found != submitted:\n e = ('doctest failure\\n\\n'\n 'Document source: {0}\\n\\n'\n 'Raw content: \\n{1}\\n\\n'\n 'On input line(s):\\n{TAB}{2}\\n\\n'\n 'we found output:\\n{TAB}{3}\\n\\n'\n 'instead of the expected:\\n{TAB}{4}\\n\\n')\n e = e.format(source, content, '\\n'.join(input_lines),\n repr(found), repr(submitted), TAB=TAB)\n raise RuntimeError(e)\n else:\n self.custom_doctest(decorator, input_lines, found, submitted)\n",
"def process_comment(self, data):\n \"\"\"Process data fPblock for COMMENT token.\"\"\"\n if not self.is_suppress:\n return [data]\n",
"def save_image(self, image_file):\n \"\"\"\n Saves the image file to disk.\n \"\"\"\n self.ensure_pyplot()\n command = ('plt.gcf().savefig(\"%s\", bbox_inches=\"tight\", '\n 'dpi=100)' % image_file)\n\n #print 'SAVEFIG', command # dbg\n self.process_input_line('bookmark ipy_thisdir', store_history=False)\n self.process_input_line('cd -b ipy_savedir', store_history=False)\n self.process_input_line(command, store_history=False)\n self.process_input_line('cd -b ipy_thisdir', store_history=False)\n self.process_input_line('bookmark -d ipy_thisdir', store_history=False)\n self.clear_cout()\n"
] |
class EmbeddedSphinxShell(object):
"""An embedded IPython instance to run inside Sphinx"""
def __init__(self, exec_lines=None,state=None):
self.cout = DecodingStringIO(u'')
if exec_lines is None:
exec_lines = []
self.state = state
# Create config object for IPython
config = Config()
config.InteractiveShell.autocall = False
config.InteractiveShell.autoindent = False
config.InteractiveShell.colors = 'NoColor'
# create a profile so instance history isn't saved
tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
profname = 'auto_profile_sphinx_build'
pdir = os.path.join(tmp_profile_dir,profname)
profile = ProfileDir.create_profile_dir(pdir)
# Create and initialize global ipython, but don't start its mainloop.
# This will persist across different EmbededSphinxShell instances.
IP = InteractiveShell.instance(config=config, profile_dir=profile)
# io.stdout redirect must be done after instantiating InteractiveShell
io.stdout = self.cout
io.stderr = self.cout
# For debugging, so we can see normal output, use this:
#from IPython.utils.io import Tee
#io.stdout = Tee(self.cout, channel='stdout') # dbg
#io.stderr = Tee(self.cout, channel='stderr') # dbg
# Store a few parts of IPython we'll need.
self.IP = IP
self.user_ns = self.IP.user_ns
self.user_global_ns = self.IP.user_global_ns
self.input = ''
self.output = ''
self.is_verbatim = False
self.is_doctest = False
self.is_suppress = False
# Optionally, provide more detailed information to shell.
self.directive = None
# on the first call to the savefig decorator, we'll import
# pyplot as plt so we can make a call to the plt.gcf().savefig
self._pyplot_imported = False
# Prepopulate the namespace.
for line in exec_lines:
self.process_input_line(line, store_history=False)
def clear_cout(self):
self.cout.seek(0)
self.cout.truncate(0)
def process_input_line(self, line, store_history=True):
"""process the input, capturing stdout"""
stdout = sys.stdout
splitter = self.IP.input_splitter
try:
sys.stdout = self.cout
splitter.push(line)
more = splitter.push_accepts_more()
if not more:
try:
source_raw = splitter.source_raw_reset()[1]
except:
# recent ipython #4504
source_raw = splitter.raw_reset()
self.IP.run_cell(source_raw, store_history=store_history)
finally:
sys.stdout = stdout
def process_image(self, decorator):
"""
# build out an image directive like
# .. image:: somefile.png
# :width 4in
#
# from an input like
# savefig somefile.png width=4in
"""
savefig_dir = self.savefig_dir
source_dir = self.source_dir
saveargs = decorator.split(' ')
filename = saveargs[1]
# insert relative path to image file in source
outfile = os.path.relpath(os.path.join(savefig_dir,filename),
source_dir)
imagerows = ['.. image:: %s'%outfile]
for kwarg in saveargs[2:]:
arg, val = kwarg.split('=')
arg = arg.strip()
val = val.strip()
imagerows.append(' :%s: %s'%(arg, val))
image_file = os.path.basename(outfile) # only return file name
image_directive = '\n'.join(imagerows)
return image_file, image_directive
# Callbacks for each type of token
def process_input(self, data, input_prompt, lineno):
"""
Process data block for INPUT token.
"""
decorator, input, rest = data
image_file = None
image_directive = None
is_verbatim = decorator=='@verbatim' or self.is_verbatim
is_doctest = (decorator is not None and \
decorator.startswith('@doctest')) or self.is_doctest
is_suppress = decorator=='@suppress' or self.is_suppress
is_okexcept = decorator=='@okexcept' or self.is_okexcept
is_okwarning = decorator=='@okwarning' or self.is_okwarning
is_savefig = decorator is not None and \
decorator.startswith('@savefig')
# set the encodings to be used by DecodingStringIO
# to convert the execution output into unicode if
# needed. this attrib is set by IpythonDirective.run()
# based on the specified block options, defaulting to ['ut
self.cout.set_encodings(self.output_encoding)
input_lines = input.split('\n')
if len(input_lines) > 1:
if input_lines[-1] != "":
input_lines.append('') # make sure there's a blank line
# so splitter buffer gets reset
continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
if is_savefig:
image_file, image_directive = self.process_image(decorator)
ret = []
is_semicolon = False
# Hold the execution count, if requested to do so.
if is_suppress and self.hold_count:
store_history = False
else:
store_history = True
# Note: catch_warnings is not thread safe
with warnings.catch_warnings(record=True) as ws:
for i, line in enumerate(input_lines):
if line.endswith(';'):
is_semicolon = True
if i == 0:
# process the first input line
if is_verbatim:
self.process_input_line('')
self.IP.execution_count += 1 # increment it anyway
else:
# only submit the line in non-verbatim mode
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(input_prompt, line)
else:
# process a continuation line
if not is_verbatim:
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(continuation, line)
if not is_suppress:
ret.append(formatted_line)
if not is_suppress and len(rest.strip()) and is_verbatim:
# the "rest" is the standard output of the
# input, which needs to be added in
# verbatim mode
ret.append(rest)
self.cout.seek(0)
output = self.cout.read()
if not is_suppress and not is_semicolon:
ret.append(output)
elif is_semicolon: # get spacing right
ret.append('')
# context information
filename = self.state.document.current_source
lineno = self.state.document.current_line
# output any exceptions raised during execution to stdout
# unless :okexcept: has been specified.
if not is_okexcept and "Traceback" in output:
s = "\nException in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write(output)
sys.stdout.write('<<<' + ('-' * 73) + '\n\n')
# output any warning raised during execution to stdout
# unless :okwarning: has been specified.
if not is_okwarning:
for w in ws:
s = "\nWarning in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write('-' * 76 + '\n')
s=warnings.formatwarning(w.message, w.category,
w.filename, w.lineno, w.line)
sys.stdout.write(s)
sys.stdout.write('<<<' + ('-' * 73) + '\n')
self.cout.truncate(0)
return (ret, input_lines, output, is_doctest, decorator, image_file,
image_directive)
def process_output(self, data, output_prompt,
input_lines, output, is_doctest, decorator, image_file):
"""
Process data block for OUTPUT token.
"""
TAB = ' ' * 4
if is_doctest and output is not None:
found = output
found = found.strip()
submitted = data.strip()
if self.directive is None:
source = 'Unavailable'
content = 'Unavailable'
else:
source = self.directive.state.document.current_source
content = self.directive.content
# Add tabs and join into a single string.
content = '\n'.join([TAB + line for line in content])
# Make sure the output contains the output prompt.
ind = found.find(output_prompt)
if ind < 0:
e = ('output does not contain output prompt\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'Input line(s):\n{TAB}{2}\n\n'
'Output line(s):\n{TAB}{3}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), TAB=TAB)
raise RuntimeError(e)
found = found[len(output_prompt):].strip()
# Handle the actual doctest comparison.
if decorator.strip() == '@doctest':
# Standard doctest
if found != submitted:
e = ('doctest failure\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'On input line(s):\n{TAB}{2}\n\n'
'we found output:\n{TAB}{3}\n\n'
'instead of the expected:\n{TAB}{4}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), repr(submitted), TAB=TAB)
raise RuntimeError(e)
else:
self.custom_doctest(decorator, input_lines, found, submitted)
def process_comment(self, data):
"""Process data fPblock for COMMENT token."""
if not self.is_suppress:
return [data]
def save_image(self, image_file):
"""
Saves the image file to disk.
"""
self.ensure_pyplot()
command = ('plt.gcf().savefig("%s", bbox_inches="tight", '
'dpi=100)' % image_file)
#print 'SAVEFIG', command # dbg
self.process_input_line('bookmark ipy_thisdir', store_history=False)
self.process_input_line('cd -b ipy_savedir', store_history=False)
self.process_input_line(command, store_history=False)
self.process_input_line('cd -b ipy_thisdir', store_history=False)
self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
self.clear_cout()
def ensure_pyplot(self):
"""
Ensures that pyplot has been imported into the embedded IPython shell.
Also, makes sure to set the backend appropriately if not set already.
"""
# We are here if the @figure pseudo decorator was used. Thus, it's
# possible that we could be here even if python_mplbackend were set to
# `None`. That's also strange and perhaps worthy of raising an
# exception, but for now, we just set the backend to 'agg'.
if not self._pyplot_imported:
if 'matplotlib.backends' not in sys.modules:
# Then ipython_matplotlib was set to None but there was a
# call to the @figure decorator (and ipython_execlines did
# not set a backend).
#raise Exception("No backend was set, but @figure was used!")
import matplotlib
matplotlib.use('agg')
# Always import pyplot into embedded shell.
self.process_input_line('import matplotlib.pyplot as plt',
store_history=False)
self._pyplot_imported = True
def process_pure_python(self, content):
"""
content is a list of strings. it is unedited directive content
This runs it line by line in the InteractiveShell, prepends
prompts as needed capturing stderr and stdout, then returns
the content as a list as if it were ipython code
"""
output = []
savefig = False # keep up with this to clear figure
multiline = False # to handle line continuation
multiline_start = None
fmtin = self.promptin
ct = 0
for lineno, line in enumerate(content):
line_stripped = line.strip()
if not len(line):
output.append(line)
continue
# handle decorators
if line_stripped.startswith('@'):
output.extend([line])
if 'savefig' in line:
savefig = True # and need to clear figure
continue
# handle comments
if line_stripped.startswith('#'):
output.extend([line])
continue
# deal with lines checking for multiline
continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
if not multiline:
modified = u"%s %s" % (fmtin % ct, line_stripped)
output.append(modified)
ct += 1
try:
ast.parse(line_stripped)
output.append(u'')
except Exception: # on a multiline
multiline = True
multiline_start = lineno
else: # still on a multiline
modified = u'%s %s' % (continuation, line)
output.append(modified)
# if the next line is indented, it should be part of multiline
if len(content) > lineno + 1:
nextline = content[lineno + 1]
if len(nextline) - len(nextline.lstrip()) > 3:
continue
try:
mod = ast.parse(
'\n'.join(content[multiline_start:lineno+1]))
if isinstance(mod.body[0], ast.FunctionDef):
# check to see if we have the whole function
for element in mod.body[0].body:
if isinstance(element, ast.Return):
multiline = False
else:
output.append(u'')
multiline = False
except Exception:
pass
if savefig: # clear figure if plotted
self.ensure_pyplot()
self.process_input_line('plt.clf()', store_history=False)
self.clear_cout()
savefig = False
return output
def custom_doctest(self, decorator, input_lines, found, submitted):
"""
Perform a specialized doctest.
"""
from .custom_doctests import doctests
args = decorator.split()
doctest_type = args[1]
if doctest_type in doctests:
doctests[doctest_type](self, args, input_lines, found, submitted)
else:
e = "Invalid option to @doctest: {0}".format(doctest_type)
raise Exception(e)
|
TomAugspurger/engarde
|
docs/sphinxext/ipython_directive.py
|
EmbeddedSphinxShell.ensure_pyplot
|
python
|
def ensure_pyplot(self):
# We are here if the @figure pseudo decorator was used. Thus, it's
# possible that we could be here even if python_mplbackend were set to
# `None`. That's also strange and perhaps worthy of raising an
# exception, but for now, we just set the backend to 'agg'.
if not self._pyplot_imported:
if 'matplotlib.backends' not in sys.modules:
# Then ipython_matplotlib was set to None but there was a
# call to the @figure decorator (and ipython_execlines did
# not set a backend).
#raise Exception("No backend was set, but @figure was used!")
import matplotlib
matplotlib.use('agg')
# Always import pyplot into embedded shell.
self.process_input_line('import matplotlib.pyplot as plt',
store_history=False)
self._pyplot_imported = True
|
Ensures that pyplot has been imported into the embedded IPython shell.
Also, makes sure to set the backend appropriately if not set already.
|
train
|
https://github.com/TomAugspurger/engarde/blob/e7ea040cf0d20aee7ca4375b8c27caa2d9e43945/docs/sphinxext/ipython_directive.py#L610-L634
|
[
"def process_input_line(self, line, store_history=True):\n \"\"\"process the input, capturing stdout\"\"\"\n\n stdout = sys.stdout\n splitter = self.IP.input_splitter\n try:\n sys.stdout = self.cout\n splitter.push(line)\n more = splitter.push_accepts_more()\n if not more:\n try:\n source_raw = splitter.source_raw_reset()[1]\n except:\n # recent ipython #4504\n source_raw = splitter.raw_reset()\n self.IP.run_cell(source_raw, store_history=store_history)\n finally:\n sys.stdout = stdout\n"
] |
class EmbeddedSphinxShell(object):
"""An embedded IPython instance to run inside Sphinx"""
def __init__(self, exec_lines=None,state=None):
self.cout = DecodingStringIO(u'')
if exec_lines is None:
exec_lines = []
self.state = state
# Create config object for IPython
config = Config()
config.InteractiveShell.autocall = False
config.InteractiveShell.autoindent = False
config.InteractiveShell.colors = 'NoColor'
# create a profile so instance history isn't saved
tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
profname = 'auto_profile_sphinx_build'
pdir = os.path.join(tmp_profile_dir,profname)
profile = ProfileDir.create_profile_dir(pdir)
# Create and initialize global ipython, but don't start its mainloop.
# This will persist across different EmbededSphinxShell instances.
IP = InteractiveShell.instance(config=config, profile_dir=profile)
# io.stdout redirect must be done after instantiating InteractiveShell
io.stdout = self.cout
io.stderr = self.cout
# For debugging, so we can see normal output, use this:
#from IPython.utils.io import Tee
#io.stdout = Tee(self.cout, channel='stdout') # dbg
#io.stderr = Tee(self.cout, channel='stderr') # dbg
# Store a few parts of IPython we'll need.
self.IP = IP
self.user_ns = self.IP.user_ns
self.user_global_ns = self.IP.user_global_ns
self.input = ''
self.output = ''
self.is_verbatim = False
self.is_doctest = False
self.is_suppress = False
# Optionally, provide more detailed information to shell.
self.directive = None
# on the first call to the savefig decorator, we'll import
# pyplot as plt so we can make a call to the plt.gcf().savefig
self._pyplot_imported = False
# Prepopulate the namespace.
for line in exec_lines:
self.process_input_line(line, store_history=False)
def clear_cout(self):
self.cout.seek(0)
self.cout.truncate(0)
def process_input_line(self, line, store_history=True):
"""process the input, capturing stdout"""
stdout = sys.stdout
splitter = self.IP.input_splitter
try:
sys.stdout = self.cout
splitter.push(line)
more = splitter.push_accepts_more()
if not more:
try:
source_raw = splitter.source_raw_reset()[1]
except:
# recent ipython #4504
source_raw = splitter.raw_reset()
self.IP.run_cell(source_raw, store_history=store_history)
finally:
sys.stdout = stdout
def process_image(self, decorator):
"""
# build out an image directive like
# .. image:: somefile.png
# :width 4in
#
# from an input like
# savefig somefile.png width=4in
"""
savefig_dir = self.savefig_dir
source_dir = self.source_dir
saveargs = decorator.split(' ')
filename = saveargs[1]
# insert relative path to image file in source
outfile = os.path.relpath(os.path.join(savefig_dir,filename),
source_dir)
imagerows = ['.. image:: %s'%outfile]
for kwarg in saveargs[2:]:
arg, val = kwarg.split('=')
arg = arg.strip()
val = val.strip()
imagerows.append(' :%s: %s'%(arg, val))
image_file = os.path.basename(outfile) # only return file name
image_directive = '\n'.join(imagerows)
return image_file, image_directive
# Callbacks for each type of token
def process_input(self, data, input_prompt, lineno):
"""
Process data block for INPUT token.
"""
decorator, input, rest = data
image_file = None
image_directive = None
is_verbatim = decorator=='@verbatim' or self.is_verbatim
is_doctest = (decorator is not None and \
decorator.startswith('@doctest')) or self.is_doctest
is_suppress = decorator=='@suppress' or self.is_suppress
is_okexcept = decorator=='@okexcept' or self.is_okexcept
is_okwarning = decorator=='@okwarning' or self.is_okwarning
is_savefig = decorator is not None and \
decorator.startswith('@savefig')
# set the encodings to be used by DecodingStringIO
# to convert the execution output into unicode if
# needed. this attrib is set by IpythonDirective.run()
# based on the specified block options, defaulting to ['ut
self.cout.set_encodings(self.output_encoding)
input_lines = input.split('\n')
if len(input_lines) > 1:
if input_lines[-1] != "":
input_lines.append('') # make sure there's a blank line
# so splitter buffer gets reset
continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
if is_savefig:
image_file, image_directive = self.process_image(decorator)
ret = []
is_semicolon = False
# Hold the execution count, if requested to do so.
if is_suppress and self.hold_count:
store_history = False
else:
store_history = True
# Note: catch_warnings is not thread safe
with warnings.catch_warnings(record=True) as ws:
for i, line in enumerate(input_lines):
if line.endswith(';'):
is_semicolon = True
if i == 0:
# process the first input line
if is_verbatim:
self.process_input_line('')
self.IP.execution_count += 1 # increment it anyway
else:
# only submit the line in non-verbatim mode
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(input_prompt, line)
else:
# process a continuation line
if not is_verbatim:
self.process_input_line(line, store_history=store_history)
formatted_line = '%s %s'%(continuation, line)
if not is_suppress:
ret.append(formatted_line)
if not is_suppress and len(rest.strip()) and is_verbatim:
# the "rest" is the standard output of the
# input, which needs to be added in
# verbatim mode
ret.append(rest)
self.cout.seek(0)
output = self.cout.read()
if not is_suppress and not is_semicolon:
ret.append(output)
elif is_semicolon: # get spacing right
ret.append('')
# context information
filename = self.state.document.current_source
lineno = self.state.document.current_line
# output any exceptions raised during execution to stdout
# unless :okexcept: has been specified.
if not is_okexcept and "Traceback" in output:
s = "\nException in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write(output)
sys.stdout.write('<<<' + ('-' * 73) + '\n\n')
# output any warning raised during execution to stdout
# unless :okwarning: has been specified.
if not is_okwarning:
for w in ws:
s = "\nWarning in %s at block ending on line %s\n" % (filename, lineno)
s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n"
sys.stdout.write('\n\n>>>' + ('-' * 73))
sys.stdout.write(s)
sys.stdout.write('-' * 76 + '\n')
s=warnings.formatwarning(w.message, w.category,
w.filename, w.lineno, w.line)
sys.stdout.write(s)
sys.stdout.write('<<<' + ('-' * 73) + '\n')
self.cout.truncate(0)
return (ret, input_lines, output, is_doctest, decorator, image_file,
image_directive)
def process_output(self, data, output_prompt,
input_lines, output, is_doctest, decorator, image_file):
"""
Process data block for OUTPUT token.
"""
TAB = ' ' * 4
if is_doctest and output is not None:
found = output
found = found.strip()
submitted = data.strip()
if self.directive is None:
source = 'Unavailable'
content = 'Unavailable'
else:
source = self.directive.state.document.current_source
content = self.directive.content
# Add tabs and join into a single string.
content = '\n'.join([TAB + line for line in content])
# Make sure the output contains the output prompt.
ind = found.find(output_prompt)
if ind < 0:
e = ('output does not contain output prompt\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'Input line(s):\n{TAB}{2}\n\n'
'Output line(s):\n{TAB}{3}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), TAB=TAB)
raise RuntimeError(e)
found = found[len(output_prompt):].strip()
# Handle the actual doctest comparison.
if decorator.strip() == '@doctest':
# Standard doctest
if found != submitted:
e = ('doctest failure\n\n'
'Document source: {0}\n\n'
'Raw content: \n{1}\n\n'
'On input line(s):\n{TAB}{2}\n\n'
'we found output:\n{TAB}{3}\n\n'
'instead of the expected:\n{TAB}{4}\n\n')
e = e.format(source, content, '\n'.join(input_lines),
repr(found), repr(submitted), TAB=TAB)
raise RuntimeError(e)
else:
self.custom_doctest(decorator, input_lines, found, submitted)
def process_comment(self, data):
"""Process data fPblock for COMMENT token."""
if not self.is_suppress:
return [data]
def save_image(self, image_file):
"""
Saves the image file to disk.
"""
self.ensure_pyplot()
command = ('plt.gcf().savefig("%s", bbox_inches="tight", '
'dpi=100)' % image_file)
#print 'SAVEFIG', command # dbg
self.process_input_line('bookmark ipy_thisdir', store_history=False)
self.process_input_line('cd -b ipy_savedir', store_history=False)
self.process_input_line(command, store_history=False)
self.process_input_line('cd -b ipy_thisdir', store_history=False)
self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
self.clear_cout()
def process_block(self, block):
"""
process block from the block_parser and return a list of processed lines
"""
ret = []
output = None
input_lines = None
lineno = self.IP.execution_count
input_prompt = self.promptin % lineno
output_prompt = self.promptout % lineno
image_file = None
image_directive = None
for token, data in block:
if token == COMMENT:
out_data = self.process_comment(data)
elif token == INPUT:
(out_data, input_lines, output, is_doctest, decorator,
image_file, image_directive) = \
self.process_input(data, input_prompt, lineno)
elif token == OUTPUT:
out_data = \
self.process_output(data, output_prompt,
input_lines, output, is_doctest,
decorator, image_file)
if out_data:
ret.extend(out_data)
# save the image files
if image_file is not None:
self.save_image(image_file)
return ret, image_directive
def process_pure_python(self, content):
"""
content is a list of strings. it is unedited directive content
This runs it line by line in the InteractiveShell, prepends
prompts as needed capturing stderr and stdout, then returns
the content as a list as if it were ipython code
"""
output = []
savefig = False # keep up with this to clear figure
multiline = False # to handle line continuation
multiline_start = None
fmtin = self.promptin
ct = 0
for lineno, line in enumerate(content):
line_stripped = line.strip()
if not len(line):
output.append(line)
continue
# handle decorators
if line_stripped.startswith('@'):
output.extend([line])
if 'savefig' in line:
savefig = True # and need to clear figure
continue
# handle comments
if line_stripped.startswith('#'):
output.extend([line])
continue
# deal with lines checking for multiline
continuation = u' %s:'% ''.join(['.']*(len(str(ct))+2))
if not multiline:
modified = u"%s %s" % (fmtin % ct, line_stripped)
output.append(modified)
ct += 1
try:
ast.parse(line_stripped)
output.append(u'')
except Exception: # on a multiline
multiline = True
multiline_start = lineno
else: # still on a multiline
modified = u'%s %s' % (continuation, line)
output.append(modified)
# if the next line is indented, it should be part of multiline
if len(content) > lineno + 1:
nextline = content[lineno + 1]
if len(nextline) - len(nextline.lstrip()) > 3:
continue
try:
mod = ast.parse(
'\n'.join(content[multiline_start:lineno+1]))
if isinstance(mod.body[0], ast.FunctionDef):
# check to see if we have the whole function
for element in mod.body[0].body:
if isinstance(element, ast.Return):
multiline = False
else:
output.append(u'')
multiline = False
except Exception:
pass
if savefig: # clear figure if plotted
self.ensure_pyplot()
self.process_input_line('plt.clf()', store_history=False)
self.clear_cout()
savefig = False
return output
def custom_doctest(self, decorator, input_lines, found, submitted):
"""
Perform a specialized doctest.
"""
from .custom_doctests import doctests
args = decorator.split()
doctest_type = args[1]
if doctest_type in doctests:
doctests[doctest_type](self, args, input_lines, found, submitted)
else:
e = "Invalid option to @doctest: {0}".format(doctest_type)
raise Exception(e)
|
TheHive-Project/TheHive4py
|
thehive4py/models.py
|
CaseHelper.create
|
python
|
def create(self, title, description, **kwargs):
case = Case(title=title, description=description, **kwargs)
response = self._thehive.create_case(case)
# Check for failed authentication
if response.status_code == requests.codes.unauthorized:
raise TheHiveException("Authentication failed")
if self.status_ok(response.status_code):
return self(response.json()['id'])
else:
raise CaseException("Server returned {}: {}".format(response.status_code, response.text))
|
Create an instance of the Case class.
:param title: Case title.
:param description: Case description.
:param kwargs: Additional arguments.
:return: The created instance.
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/models.py#L154-L174
|
[
"def status_ok(status_code):\n \"\"\"Check whether a status code is OK\"\"\"\n OK_STATUS_CODES = [200, 201]\n return status_code in OK_STATUS_CODES\n"
] |
class CaseHelper:
"""
Provides helper methods for interacting with instances of the Case class.
"""
def __init__(self, thehive):
"""
Initialize a CaseHelper instance.
:param thehive: A TheHiveApi instance.
"""
self._thehive = thehive
def __call__(self, id):
"""
Return an instance of Case with the given case ID.
:param id: ID of a case to retrieve.
"""
response = self._thehive.get_case(id)
# Check for failed authentication
if response.status_code == requests.codes.unauthorized:
raise TheHiveException("Authentication failed")
if response.status_code == requests.codes.not_found:
raise CaseException("Case {} not found".format(id))
if self.status_ok(response.status_code):
data = response.json()
case = Case(json=data)
# Add attributes that are not added by the constructor
case.id = data.get('id', None)
case.owner = data.get('owner', None)
case.caseId = data.get('caseId', None)
case.status = data.get('status', None)
case.createdAt = data.get('createdAt', None)
case.createdBy = data.get('createdBy', None)
case.updatedAt = data.get('updatedAt', None)
case.updatedBy = data.get('updatedBy', None)
return case
def update(self, case_id, **attributes):
"""
Update a case.
:param case_id: The ID of the case to update
:param attributes: key=value pairs of case attributes to update (field=new_value)
:return: The created instance.
"""
response = self._thehive.do_patch("/api/case/{}".format(case_id), **attributes)
if response.status_code == requests.codes.unauthorized:
raise TheHiveException("Authentication failed")
if self.status_ok(response.status_code):
return self(response.json()['id'])
else:
raise CaseException("Server returned {}: {}".format(response.status_code, response.text))
@staticmethod
def status_ok(status_code):
"""Check whether a status code is OK"""
OK_STATUS_CODES = [200, 201]
return status_code in OK_STATUS_CODES
|
TheHive-Project/TheHive4py
|
thehive4py/models.py
|
CaseHelper.update
|
python
|
def update(self, case_id, **attributes):
response = self._thehive.do_patch("/api/case/{}".format(case_id), **attributes)
if response.status_code == requests.codes.unauthorized:
raise TheHiveException("Authentication failed")
if self.status_ok(response.status_code):
return self(response.json()['id'])
else:
raise CaseException("Server returned {}: {}".format(response.status_code, response.text))
|
Update a case.
:param case_id: The ID of the case to update
:param attributes: key=value pairs of case attributes to update (field=new_value)
:return: The created instance.
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/models.py#L176-L193
|
[
"def status_ok(status_code):\n \"\"\"Check whether a status code is OK\"\"\"\n OK_STATUS_CODES = [200, 201]\n return status_code in OK_STATUS_CODES\n"
] |
class CaseHelper:
"""
Provides helper methods for interacting with instances of the Case class.
"""
def __init__(self, thehive):
"""
Initialize a CaseHelper instance.
:param thehive: A TheHiveApi instance.
"""
self._thehive = thehive
def __call__(self, id):
"""
Return an instance of Case with the given case ID.
:param id: ID of a case to retrieve.
"""
response = self._thehive.get_case(id)
# Check for failed authentication
if response.status_code == requests.codes.unauthorized:
raise TheHiveException("Authentication failed")
if response.status_code == requests.codes.not_found:
raise CaseException("Case {} not found".format(id))
if self.status_ok(response.status_code):
data = response.json()
case = Case(json=data)
# Add attributes that are not added by the constructor
case.id = data.get('id', None)
case.owner = data.get('owner', None)
case.caseId = data.get('caseId', None)
case.status = data.get('status', None)
case.createdAt = data.get('createdAt', None)
case.createdBy = data.get('createdBy', None)
case.updatedAt = data.get('updatedAt', None)
case.updatedBy = data.get('updatedBy', None)
return case
def create(self, title, description, **kwargs):
"""
Create an instance of the Case class.
:param title: Case title.
:param description: Case description.
:param kwargs: Additional arguments.
:return: The created instance.
"""
case = Case(title=title, description=description, **kwargs)
response = self._thehive.create_case(case)
# Check for failed authentication
if response.status_code == requests.codes.unauthorized:
raise TheHiveException("Authentication failed")
if self.status_ok(response.status_code):
return self(response.json()['id'])
else:
raise CaseException("Server returned {}: {}".format(response.status_code, response.text))
def update(self, case_id, **attributes):
"""
Update a case.
:param case_id: The ID of the case to update
:param attributes: key=value pairs of case attributes to update (field=new_value)
:return: The created instance.
"""
response = self._thehive.do_patch("/api/case/{}".format(case_id), **attributes)
if response.status_code == requests.codes.unauthorized:
raise TheHiveException("Authentication failed")
if self.status_ok(response.status_code):
return self(response.json()['id'])
else:
raise CaseException("Server returned {}: {}".format(response.status_code, response.text))
@staticmethod
def status_ok(status_code):
"""Check whether a status code is OK"""
OK_STATUS_CODES = [200, 201]
return status_code in OK_STATUS_CODES
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.__find_rows
|
python
|
def __find_rows(self, find_url, **attributes):
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
|
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L61-L84
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
    """
    Update an existing task; the task's `id` selects which task is patched.

    :param task: The task to update.
    :return: Response from TheHive
    :rtype: requests.Response
    :raises CaseTaskException: when the HTTP request fails
    """
    endpoint = "{}/api/case/task/{}".format(self.url, task.id)
    # Only these attributes may be modified through the API.
    allowed = ('title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate')
    payload = {key: value for key, value in task.__dict__.items() if key in allowed}
    try:
        return requests.patch(
            endpoint,
            headers={'Content-Type': 'application/json'},
            json=payload,
            proxies=self.proxies,
            auth=self.auth,
            verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseTaskException("Case task update error: {}".format(err))
def create_task_log(self, task_id, case_task_log):
    """
    Create a log entry on a task, optionally attaching a file.

    :param task_id: Task identifier
    :param case_task_log: TheHive log
    :type case_task_log: CaseTaskLog defined in models.py
    :return: Response from TheHive
    :rtype: requests.Response
    :raises CaseTaskException: when the HTTP request fails
    """
    req = self.url + "/api/case/task/{}/log".format(task_id)
    if case_task_log.file:
        # Multipart upload: the message rides in the '_json' form field and the
        # file content in 'attachment'.
        data = {'_json': json.dumps({"message": case_task_log.message})}
        try:
            # Bug fix: the original passed open(...) directly into the files
            # tuple and never closed the handle. A context manager guarantees
            # the file is closed even when the request raises.
            with open(case_task_log.file, 'rb') as attachment:
                f = {'attachment': (os.path.basename(case_task_log.file), attachment, magic.Magic(mime=True).from_file(case_task_log.file))}
                return requests.post(req, data=data, files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task log create error: {}".format(e))
    else:
        # No attachment: send the message as a plain JSON body.
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message': case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
    """
    Attach an observable (artifact) to a case.

    :param case_id: Case identifier
    :param case_observable: TheHive observable
    :type case_observable: CaseObservable defined in models.py
    :return: TheHive observable
    :rtype: json
    """
    req = self.url + "/api/case/{}/artifact".format(case_id)
    if case_observable.dataType == 'file':
        # File observables go out as multipart/form-data: the metadata is
        # serialized into the '_json' form field and the file itself is passed
        # via `files`. case_observable.data[0] is assumed to be a
        # requests-compatible files mapping — TODO confirm against models.py.
        try:
            mesg = json.dumps({ "dataType": case_observable.dataType,
                "message": case_observable.message,
                "tlp": case_observable.tlp,
                "tags": case_observable.tags,
                "ioc": case_observable.ioc
                })
            data = {"_json": mesg}
            return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseObservableException("Case observable create error: {}".format(e))
    else:
        # Non-file observables are sent as a plain JSON body.
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
    """
    Fetch a single case by its identifier.

    :param case_id: Case identifier
    :return: Response whose JSON body is the case
    :rtype: requests.Response
    :raises CaseException: when the HTTP request fails
    """
    endpoint = "{}/api/case/{}".format(self.url, case_id)
    try:
        return requests.get(endpoint, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseException("Case fetch error: {}".format(err))
def find_cases(self, **attributes):
    """
    Search for cases.

    :param attributes: Search parameters: `query` (search criteria),
        `range` (defaults to "all") and `sort` (defaults to []).
    :return: Response whose JSON body is the list of matching cases
    :rtype: requests.Response
    """
    return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
    """
    Return the first case of the result set given by the query.

    :return: first case of result set given by query
    :rtype: dict

    .. note:: Raises IndexError when the search returns no results.
    """
    return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
    """
    List the observables attached to a case.

    :param case_id: Case identifier
    :param attributes: Optional `query` criteria plus `range`/`sort` paging parameters.
    :return: list of observables
    :rtype: json
    """
    req = self.url + "/api/case/artifact/_search"
    # Add range and sort parameters
    params = {
        "range": attributes.get("range", "all"),
        "sort": attributes.get("sort", [])
    }
    # Add body: restrict the search to artifacts whose parent is this case.
    parent_criteria = Parent('case', Id(case_id))
    # Append the custom query if specified
    if "query" in attributes:
        criteria = And(parent_criteria, attributes["query"])
    else:
        criteria = parent_criteria
    data = {
        "query": criteria
    }
    try:
        return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
    """
    List the tasks of a case.

    :param case_id: Case identifier
    :param attributes: Optional `query` criteria plus `range`/`sort` paging parameters.
    :return: Response whose JSON body is the list of tasks
    :rtype: requests.Response
    """
    req = self.url + "/api/case/task/_search"
    # Add range and sort parameters
    params = {
        "range": attributes.get("range", "all"),
        "sort": attributes.get("sort", [])
    }
    # Add body: restrict the search to tasks whose parent is this case.
    parent_criteria = Parent('case', Id(case_id))
    # Append the custom query if specified
    if "query" in attributes:
        criteria = And(parent_criteria, attributes["query"])
    else:
        criteria = parent_criteria
    data = {
        "query": criteria
    }
    try:
        return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
    """
    Fetch the cases linked to a given case.

    :param case_id: Case identifier
    :return: TheHive case(s)
    :rtype: json
    :raises CaseException: when the HTTP request fails
    """
    endpoint = "{}/api/case/{}/links".format(self.url, case_id)
    try:
        return requests.get(endpoint, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseException("Linked cases fetch error: {}".format(err))
def find_case_templates(self, **attributes):
    """
    Search for case templates.

    :param attributes: Search parameters: `query`, `range` and `sort`.
    :return: list of case templates
    :rtype: json
    """
    return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
    """
    Fetch the active case template matching ``name``.

    :param name: Case template name
    :return: TheHive case template (first match)
    :rtype: json
    :raises CaseTemplateException: when no template matches or the request fails
    """
    req = self.url + "/api/case/template/_search"
    data = {
        "query": And(Eq("name", name), Eq("status", "Ok"))
    }
    try:
        response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        json_response = response.json()
        if response.status_code == 200 and len(json_response) > 0:
            # Reuse the already-parsed body instead of decoding it a second time
            # (the original called response.json() twice).
            return json_response[0]
        else:
            raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
    except requests.exceptions.RequestException as e:
        raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
    """
    Retrieve all log entries of a task.

    :param taskId: Task identifier
    :return: TheHive logs
    :rtype: json
    """
    req = self.url + "/api/case/task/{}/log".format(taskId)
    try:
        return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
    """
    Create a new alert in TheHive.

    :param alert: TheHive alert
    :type alert: Alert defined in models.py
    :return: Response from TheHive
    :rtype: requests.Response
    :raises AlertException: when the HTTP request fails
    """
    endpoint = "{}/api/alert".format(self.url)
    payload = alert.jsonify()
    try:
        return requests.post(
            endpoint,
            headers={'Content-Type': 'application/json'},
            data=payload,
            proxies=self.proxies,
            auth=self.auth,
            verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise AlertException("Alert create error: {}".format(err))
def mark_alert_as_read(self, alert_id):
    """
    Mark an alert as read.

    :param alert_id: The ID of the alert to mark as read.
    :return: Response from TheHive
    :rtype: requests.Response
    :raises AlertException: when the HTTP request fails
    """
    req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was not bound (`except ...:` without `as e`),
        # so formatting `e` below raised NameError instead of AlertException.
        raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
    """
    Mark an alert as unread.

    :param alert_id: The ID of the alert to mark as unread.
    :return: Response from TheHive
    :rtype: requests.Response
    :raises AlertException: when the HTTP request fails
    """
    req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was not bound (`except ...:` without `as e`),
        # so formatting `e` below raised NameError instead of AlertException.
        raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
    """
    Update an alert.

    :param alert_id: The ID of the alert to update.
    :param alert: The alert object carrying the new values.
    :param fields: Optional parameter, an array of field names to update; when
        empty, all writable attributes are sent.
    :return: Response from TheHive
    :rtype: requests.Response
    :raises AlertException: when the HTTP request fails
    """
    req = self.url + "/api/alert/{}".format(alert_id)
    # update only the alert attributes that are not read-only
    update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
    data = {k: v for k, v in alert.__dict__.items() if
            (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    if hasattr(alert, 'artifacts'):
        # Artifacts are model objects; serialize them as plain dicts.
        data['artifacts'] = [a.__dict__ for a in alert.artifacts]
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was not bound (`except ...:` without `as e`),
        # so formatting `e` below raised NameError instead of AlertException.
        raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
    """
    Fetch a single alert by its identifier.

    :param alert_id: Alert identifier
    :return: TheHive Alert
    :rtype: json
    :raises AlertException: when the HTTP request fails
    """
    endpoint = "{}/api/alert/{}".format(self.url, alert_id)
    try:
        return requests.get(endpoint, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise AlertException("Alert fetch error: {}".format(err))
def find_alerts(self, **attributes):
    """
    Search for alerts.

    :param attributes: Search parameters: `query`, `range` and `sort`.
    :return: list of Alerts
    :rtype: json
    """
    return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
    """
    Promote an alert to a case via TheHive API.

    :param alert_id: Alert identifier
    :return: TheHive Case
    :rtype: json
    :raises AlertException: when the HTTP request fails
    """
    req = self.url + "/api/alert/{}/createCase".format(alert_id)
    try:
        # TheHive requires a JSON body even though no parameters are needed.
        return requests.post(req, headers={'Content-Type': 'application/json'},
                             proxies=self.proxies, auth=self.auth,
                             verify=self.cert, data=json.dumps({}))
    except requests.exceptions.RequestException as the_exception:
        raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
    # Dead `return None` removed: the try branch returns and the except branch
    # raises, so control could never reach it.
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
    """
    Launch a Cortex analyzer job on an observable.

    :param cortex_id: identifier of the Cortex server
    :param artifact_id: identifier of the artifact as found with an artifact search
    :param analyzer_id: name of the analyzer used by the job
    :rtype: json
    :raises TheHiveException: when the HTTP request fails
    """
    endpoint = "{}/api/connector/cortex/job".format(self.url)
    payload = json.dumps({
        "cortexId": cortex_id,
        "artifactId": artifact_id,
        "analyzerId": analyzer_id,
    })
    try:
        return requests.post(
            endpoint,
            headers={'Content-Type': 'application/json'},
            data=payload,
            proxies=self.proxies,
            auth=self.auth,
            verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise TheHiveException("Analyzer run error: {}".format(err))
def find_tasks(self, **attributes):
    """
    Search for tasks across all cases.

    :param attributes: Search parameters: `query`, `range` and `sort`.
    :return: list of Tasks
    :rtype: json
    """
    return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.create_case
|
python
|
def create_case(self, case):
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
|
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L90-L104
|
[
"def jsonify(self):\n return json.dumps(self, sort_keys=True, indent=4, cls=CustomJsonEncoder)\n"
] |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.update_case
|
python
|
def update_case(self, case, fields=[]):
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
|
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L106-L124
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
    """List the tasks attached to a case.

    :param case_id: Case identifier
    :return: Response whose body is the list of tasks
    :rtype: json
    """
    endpoint = self.url + "/api/case/task/_search"
    # Paging and ordering of the result set.
    params = {
        "range": attributes.get("range", "all"),
        "sort": attributes.get("sort", []),
    }
    # Restrict the search to tasks whose parent is the given case.
    criteria = Parent('case', Id(case_id))
    if "query" in attributes:
        # Combine the caller-supplied query with the parent restriction.
        criteria = And(criteria, attributes["query"])
    payload = {"query": criteria}
    try:
        return requests.post(endpoint, params=params, json=payload,
                             proxies=self.proxies, auth=self.auth,
                             verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseTaskException("Case tasks search error: {}".format(err))
def get_linked_cases(self, case_id):
    """Fetch the cases linked to the given case.

    :param case_id: Case identifier
    :return: Response whose body is the linked TheHive case(s)
    :rtype: json
    """
    endpoint = "{}/api/case/{}/links".format(self.url, case_id)
    try:
        response = requests.get(endpoint, proxies=self.proxies,
                                auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseException("Linked cases fetch error: {}".format(err))
    return response
def find_case_templates(self, **attributes):
    """Search for case templates matching the given attributes.

    :return: list of case templates
    :rtype: json
    """
    return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
    """Fetch a case template by name.

    Only templates whose status is "Ok" are considered; the first match is
    returned.

    :param name: Case template name
    :return: TheHive case template
    :rtype: json
    :raises CaseTemplateException: if the request fails or no template matches
    """
    req = self.url + "/api/case/template/_search"
    data = {
        "query": And(Eq("name", name), Eq("status", "Ok"))
    }
    try:
        response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        json_response = response.json()
        if response.status_code == 200 and len(json_response) > 0:
            # Reuse the already-parsed body instead of decoding the JSON a
            # second time with response.json().
            return json_response[0]
        else:
            raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
    except requests.exceptions.RequestException as e:
        raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
    """Fetch all logs attached to a task.

    :param taskId: Task identifier
    :return: Response whose body is the list of TheHive logs
    :rtype: json
    """
    endpoint = "{}/api/case/task/{}/log".format(self.url, taskId)
    try:
        response = requests.get(endpoint, proxies=self.proxies,
                                auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseTaskException("Case task logs search error: {}".format(err))
    return response
def create_alert(self, alert):
    """Create a new alert.

    :param alert: TheHive alert
    :type alert: Alert defined in models.py
    :return: Response whose body is the created alert
    :rtype: json
    """
    endpoint = "{}/api/alert".format(self.url)
    payload = alert.jsonify()
    try:
        return requests.post(endpoint,
                             headers={'Content-Type': 'application/json'},
                             data=payload,
                             proxies=self.proxies,
                             auth=self.auth,
                             verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise AlertException("Alert create error: {}".format(err))
def mark_alert_as_read(self, alert_id):
    """
    Mark an alert as read.

    :param alert_id: The ID of the alert to mark as read.
    :return: Response of the markAsRead call
    :raises AlertException: if the HTTP request fails
    """
    req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was not bound ("as e"), so formatting the
        # message below raised a NameError instead of AlertException.
        raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
    """
    Mark an alert as unread.

    :param alert_id: The ID of the alert to mark as unread.
    :return: Response of the markAsUnread call
    :raises AlertException: if the HTTP request fails
    """
    req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was not bound ("as e"), so formatting the
        # message below raised a NameError instead of AlertException.
        raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
    """
    Update an alert.

    :param alert_id: The ID of the alert to update.
    :param alert: The alert object carrying the new values.
    :param fields: Optional parameter, an array of field names, the ones we
        want to update; when empty, all writable fields are sent.
    :return: Response of the PATCH call
    :raises AlertException: if the HTTP request fails
    """
    req = self.url + "/api/alert/{}".format(alert_id)
    # update only the alert attributes that are not read-only
    update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
    data = {k: v for k, v in alert.__dict__.items() if
            (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    if hasattr(alert, 'artifacts'):
        data['artifacts'] = [a.__dict__ for a in alert.artifacts]
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was not bound ("as e"), so formatting the
        # message below raised a NameError instead of AlertException.
        raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
    """Fetch a single alert by its identifier.

    :param alert_id: Alert identifier
    :return: Response whose body is the TheHive Alert
    :rtype: json
    """
    endpoint = "{}/api/alert/{}".format(self.url, alert_id)
    try:
        response = requests.get(endpoint, proxies=self.proxies,
                                auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise AlertException("Alert fetch error: {}".format(err))
    return response
def find_alerts(self, **attributes):
    """Search for alerts matching the given attributes.

    :return: list of Alerts
    :rtype: json
    """
    return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
    """
    Promote an alert to a case via the TheHive API.

    :param alert_id: Alert identifier
    :return: Response whose body is the created TheHive Case
    :rtype: json
    :raises AlertException: if the HTTP request fails
    """
    req = self.url + "/api/alert/{}/createCase".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'},
                             proxies=self.proxies, auth=self.auth,
                             verify=self.cert, data=json.dumps({}))
    except requests.exceptions.RequestException as the_exception:
        raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
    # Removed the unreachable trailing "return None": both branches above
    # exit the function (return or raise).
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
    """Start a Cortex analyzer job on an artifact.

    :param cortex_id: identifier of the Cortex server
    :param artifact_id: identifier of the artifact as found with an artifact search
    :param analyzer_id: name of the analyzer used by the job
    :rtype: json
    """
    endpoint = "{}/api/connector/cortex/job".format(self.url)
    body = json.dumps({
        "cortexId": cortex_id,
        "artifactId": artifact_id,
        "analyzerId": analyzer_id,
    })
    try:
        return requests.post(endpoint,
                             headers={'Content-Type': 'application/json'},
                             data=body,
                             proxies=self.proxies,
                             auth=self.auth,
                             verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise TheHiveException("Analyzer run error: {}".format(err))
def find_tasks(self, **attributes):
    """Search for tasks matching the given attributes.

    :return: list of Tasks
    :rtype: json
    """
    return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.create_case_task
|
python
|
def create_case_task(self, case_id, case_task):
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
|
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L126-L143
|
[
"def jsonify(self):\n return json.dumps(self, sort_keys=True, indent=4, cls=CustomJsonEncoder)\n"
] |
class TheHiveApi:
    """
    Python API client for TheHive.

    :param url: thehive URL
    :param principal: The username or the API key
    :param password: The password for basic authentication or None. Defaults to None
    :param proxies: Optional proxies dict forwarded to every request
    :param cert: TLS verification flag (or CA bundle path) forwarded to every request
    """

    def __init__(self, url, principal, password=None, proxies={}, cert=True):
        self.url = url
        self.principal = principal
        self.password = password
        self.proxies = proxies
        # Basic authentication when a password is given, bearer (API key) otherwise.
        if self.password is not None:
            self.auth = requests.auth.HTTPBasicAuth(self.principal, self.password)
        else:
            self.auth = BearerAuth(self.principal)
        self.cert = cert
        # Create a CaseHelper instance
        self.case = CaseHelper(self)

    def __find_rows(self, find_url, **attributes):
        """
        Run a search against one of the ``*_search`` endpoints.

        :param find_url: URL of the find api
        :type find_url: string
        :return: The Response returned by requests including the list of documents based on find_url
        :rtype: Response object
        """
        req = self.url + find_url
        # Add range and sort parameters
        params = {
            "range": attributes.get("range", "all"),
            "sort": attributes.get("sort", [])
        }
        # Add body
        data = {
            "query": attributes.get("query", {})
        }
        try:
            return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise TheHiveException("Error: {}".format(e))

    def do_patch(self, api_url, **attributes):
        """Send a PATCH request to ``api_url`` with ``attributes`` as the JSON body."""
        return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
                              proxies=self.proxies, auth=self.auth, verify=self.cert)

    def create_case(self, case):
        """
        :param case: The case details
        :type case: Case defined in models.py
        :return: TheHive case
        :rtype: json
        """
        req = self.url + "/api/case"
        data = case.jsonify()
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseException("Case create error: {}".format(e))

    def update_case(self, case, fields=[]):
        """
        Update a case.

        :param case: The case to update. The case's `id` determines which case to update.
        :param fields: Optional parameter, an array of fields names, the ones we want to update
        :return:
        """
        req = self.url + "/api/case/{}".format(case.id)
        # Choose which attributes to send
        update_keys = [
            'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
            'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
        ]
        data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
        try:
            return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            # Bug fix: the exception must be bound ("as e") or formatting the
            # message below raises a NameError instead of CaseException.
            raise CaseException("Case update error: {}".format(e))

    def update_case_task(self, task):
        """
        Update a TheHive task.

        :param task: The task to update. The task's `id` determines which Task to update.
        :return:
        """
        req = self.url + "/api/case/task/{}".format(task.id)
        # Choose which attributes to send
        update_keys = [
            'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
        ]
        data = {k: v for k, v in task.__dict__.items() if k in update_keys}
        try:
            return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
                                  proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task update error: {}".format(e))

    def create_task_log(self, task_id, case_task_log):
        """
        :param task_id: Task identifier
        :param case_task_log: TheHive log
        :type case_task_log: CaseTaskLog defined in models.py
        :return: TheHive log
        :rtype: json
        """
        req = self.url + "/api/case/task/{}/log".format(task_id)
        data = {'_json': json.dumps({"message": case_task_log.message})}
        if case_task_log.file:
            # Multipart upload: the log message travels in the "_json" part
            # alongside the file attachment.
            f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
            try:
                return requests.post(req, data=data, files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseTaskException("Case task log create error: {}".format(e))
        else:
            try:
                return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message': case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseTaskException("Case task log create error: {}".format(e))

    def create_case_observable(self, case_id, case_observable):
        """
        :param case_id: Case identifier
        :param case_observable: TheHive observable
        :type case_observable: CaseObservable defined in models.py
        :return: TheHive observable
        :rtype: json
        """
        req = self.url + "/api/case/{}/artifact".format(case_id)
        if case_observable.dataType == 'file':
            try:
                # File observables are sent as multipart with metadata in "_json".
                mesg = json.dumps({"dataType": case_observable.dataType,
                                   "message": case_observable.message,
                                   "tlp": case_observable.tlp,
                                   "tags": case_observable.tags,
                                   "ioc": case_observable.ioc
                                   })
                data = {"_json": mesg}
                return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseObservableException("Case observable create error: {}".format(e))
        else:
            try:
                return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseObservableException("Case observable create error: {}".format(e))

    def get_case(self, case_id):
        """
        :param case_id: Case identifier
        :return: TheHive case
        :rtype: json
        """
        req = self.url + "/api/case/{}".format(case_id)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseException("Case fetch error: {}".format(e))

    def find_cases(self, **attributes):
        """Search for cases; see ``__find_rows`` for supported attributes."""
        return self.__find_rows("/api/case/_search", **attributes)

    def find_first(self, **attributes):
        """
        :return: first case of result set given by query
        :rtype: dict
        """
        return self.find_cases(**attributes).json()[0]

    def get_case_observables(self, case_id, **attributes):
        """
        :param case_id: Case identifier
        :return: list of observables
        :rtype: json
        """
        req = self.url + "/api/case/artifact/_search"
        # Add range and sort parameters
        params = {
            "range": attributes.get("range", "all"),
            "sort": attributes.get("sort", [])
        }
        # Restrict the search to artifacts whose parent is the given case.
        parent_criteria = Parent('case', Id(case_id))
        # Append the custom query if specified
        if "query" in attributes:
            criteria = And(parent_criteria, attributes["query"])
        else:
            criteria = parent_criteria
        data = {
            "query": criteria
        }
        try:
            return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseObservableException("Case observables search error: {}".format(e))

    def get_case_tasks(self, case_id, **attributes):
        """
        :param case_id: Case identifier
        :return: list of tasks belonging to the case
        :rtype: json
        """
        req = self.url + "/api/case/task/_search"
        # Add range and sort parameters
        params = {
            "range": attributes.get("range", "all"),
            "sort": attributes.get("sort", [])
        }
        # Restrict the search to tasks whose parent is the given case.
        parent_criteria = Parent('case', Id(case_id))
        # Append the custom query if specified
        if "query" in attributes:
            criteria = And(parent_criteria, attributes["query"])
        else:
            criteria = parent_criteria
        data = {
            "query": criteria
        }
        try:
            return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case tasks search error: {}".format(e))

    def get_linked_cases(self, case_id):
        """
        :param case_id: Case identifier
        :return: TheHive case(s)
        :rtype: json
        """
        req = self.url + "/api/case/{}/links".format(case_id)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseException("Linked cases fetch error: {}".format(e))

    def find_case_templates(self, **attributes):
        """
        :return: list of case templates
        :rtype: json
        """
        return self.__find_rows("/api/case/template/_search", **attributes)

    def get_case_template(self, name):
        """
        :param name: Case template name
        :return: TheHive case template
        :rtype: json
        """
        req = self.url + "/api/case/template/_search"
        data = {
            "query": And(Eq("name", name), Eq("status", "Ok"))
        }
        try:
            response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
            json_response = response.json()
            if response.status_code == 200 and len(json_response) > 0:
                # Reuse the already-parsed body instead of decoding the JSON twice.
                return json_response[0]
            else:
                raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
        except requests.exceptions.RequestException as e:
            raise CaseTemplateException("Case template fetch error: {}".format(e))

    def get_task_logs(self, taskId):
        """
        :param taskId: Task identifier
        :return: TheHive logs
        :rtype: json
        """
        req = self.url + "/api/case/task/{}/log".format(taskId)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task logs search error: {}".format(e))

    def create_alert(self, alert):
        """
        :param alert: TheHive alert
        :type alert: Alert defined in models.py
        :return: TheHive alert
        :rtype: json
        """
        req = self.url + "/api/alert"
        data = alert.jsonify()
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise AlertException("Alert create error: {}".format(e))

    def mark_alert_as_read(self, alert_id):
        """
        Mark an alert as read.

        :param alert_id: The ID of the alert to mark as read.
        :return: Response of the markAsRead call
        """
        req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            # Bug fix: bind the exception ("as e") so the message formats
            # correctly instead of raising a NameError.
            raise AlertException("Mark alert as read error: {}".format(e))

    def mark_alert_as_unread(self, alert_id):
        """
        Mark an alert as unread.

        :param alert_id: The ID of the alert to mark as unread.
        :return: Response of the markAsUnread call
        """
        req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            # Bug fix: bind the exception ("as e") so the message formats
            # correctly instead of raising a NameError.
            raise AlertException("Mark alert as unread error: {}".format(e))

    def update_alert(self, alert_id, alert, fields=[]):
        """
        Update an alert.

        :param alert_id: The ID of the alert to update.
        :param alert: The alert object carrying the new values.
        :param fields: Optional parameter, an array of fields names, the ones we want to update
        :return: Response of the PATCH call
        """
        req = self.url + "/api/alert/{}".format(alert_id)
        # update only the alert attributes that are not read-only
        update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
        data = {k: v for k, v in alert.__dict__.items() if
                (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
        if hasattr(alert, 'artifacts'):
            data['artifacts'] = [a.__dict__ for a in alert.artifacts]
        try:
            return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            # Bug fix: bind the exception ("as e") so the message formats
            # correctly instead of raising a NameError.
            raise AlertException("Alert update error: {}".format(e))

    def get_alert(self, alert_id):
        """
        :param alert_id: Alert identifier
        :return: TheHive Alert
        :rtype: json
        """
        req = self.url + "/api/alert/{}".format(alert_id)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise AlertException("Alert fetch error: {}".format(e))

    def find_alerts(self, **attributes):
        """
        :return: list of Alerts
        :rtype: json
        """
        return self.__find_rows("/api/alert/_search", **attributes)

    def promote_alert_to_case(self, alert_id):
        """
        This uses the TheHiveAPI to promote an alert to a case

        :param alert_id: Alert identifier
        :return: TheHive Case
        :rtype: json
        """
        req = self.url + "/api/alert/{}/createCase".format(alert_id)
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'},
                                 proxies=self.proxies, auth=self.auth,
                                 verify=self.cert, data=json.dumps({}))
        except requests.exceptions.RequestException as the_exception:
            raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
        # Removed the unreachable trailing "return None": both branches above
        # exit the function (return or raise).

    def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
        """
        :param cortex_id: identifier of the Cortex server
        :param artifact_id: identifier of the artifact as found with an artifact search
        :param analyzer_id: name of the analyzer used by the job
        :rtype: json
        """
        req = self.url + "/api/connector/cortex/job"
        try:
            data = json.dumps({"cortexId": cortex_id,
                               "artifactId": artifact_id,
                               "analyzerId": analyzer_id
                               })
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise TheHiveException("Analyzer run error: {}".format(e))

    def find_tasks(self, **attributes):
        """
        :return: list of Tasks
        :rtype: json
        """
        return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.update_case_task
|
python
|
def update_case_task(self, task):
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
|
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L145-L164
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.create_task_log
|
python
|
def create_task_log(self, task_id, case_task_log):
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
|
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L166-L189
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.create_case_observable
|
python
|
def create_case_observable(self, case_id, case_observable):
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
|
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L191-L219
|
[
"def jsonify(self):\n return json.dumps(self, sort_keys=True, indent=4, cls=CustomJsonEncoder)\n"
] |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.get_linked_cases
|
python
|
def get_linked_cases(self, case_id):
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
|
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L305-L316
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
    """Update an existing case.

    :param case: The case to update. The case's ``id`` determines which case to update.
    :param fields: Optional list of attribute names to send; when empty,
        a default whitelist of updatable attributes is used.
    :return: Response object from the PATCH request.
    :raises CaseException: If the HTTP request fails.
    """
    req = self.url + "/api/case/{}".format(case.id)
    # Attributes TheHive accepts on a case update; everything else is read-only.
    update_keys = [
        'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
        'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
    ]
    data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception must be bound with ``as e`` — the original
        # referenced an undefined name ``e`` here, raising NameError instead
        # of the intended CaseException.
        raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
    """Create a log entry on a task, optionally uploading a file attachment.

    :param task_id: Task identifier.
    :param case_task_log: CaseTaskLog defined in models.py; its ``file``
        attribute, when set, names a local file to upload with the message.
    :return: Response object from the POST request.
    :raises CaseTaskException: If the HTTP request fails.
    """
    req = self.url + "/api/case/task/{}/log".format(task_id)
    if case_task_log.file:
        # Bug fix: open the attachment in a ``with`` block so the file
        # handle is always closed; the original leaked an open file object.
        with open(case_task_log.file, 'rb') as attachment:
            mime_type = magic.Magic(mime=True).from_file(case_task_log.file)
            data = {'_json': json.dumps({"message": case_task_log.message})}
            f = {'attachment': (os.path.basename(case_task_log.file), attachment, mime_type)}
            try:
                return requests.post(req, data=data, files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseTaskException("Case task log create error: {}".format(e))
    else:
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message': case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
    """
    Return the first case of the result set given by the query.

    :return: first case of result set given by query
    :rtype: dict

    .. note:: Raises ``IndexError`` when the search returns no cases —
       callers should handle an empty result set themselves.
    """
    return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
    """Fetch the case template with the given name and status "Ok".

    :param name: Case template name.
    :return: The first matching TheHive case template (dict).
    :raises CaseTemplateException: If the request fails or no template matches.
    """
    req = self.url + "/api/case/template/_search"
    data = {
        "query": And(Eq("name", name), Eq("status", "Ok"))
    }
    try:
        response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        json_response = response.json()
        if response.status_code == 200 and len(json_response) > 0:
            # Fix: reuse the already-parsed body instead of calling
            # response.json() a second time.
            return json_response[0]
        else:
            raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
    except requests.exceptions.RequestException as e:
        raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
    """Mark an alert as read.

    :param alert_id: The ID of the alert to mark as read.
    :return: Response object from the POST request.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: bind the exception with ``as e``; the original raised
        # NameError because ``e`` was undefined in the handler.
        raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
    """Mark an alert as unread.

    :param alert_id: The ID of the alert to mark as unread.
    :return: Response object from the POST request.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: bind the exception with ``as e``; the original raised
        # NameError because ``e`` was undefined in the handler.
        raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
    """Update an alert.

    :param alert_id: The ID of the alert to update.
    :param alert: Alert object carrying the new attribute values.
    :param fields: Optional list of attribute names to send; when empty,
        a default whitelist of writable attributes is used.
    :return: Response object from the PATCH request.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}".format(alert_id)
    # Update only the alert attributes that are not read-only.
    update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
    data = {k: v for k, v in alert.__dict__.items() if
            (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    if hasattr(alert, 'artifacts'):
        data['artifacts'] = [a.__dict__ for a in alert.artifacts]
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: bind the exception with ``as e``; the original raised
        # NameError because ``e`` was undefined in the handler.
        raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
    """Promote an alert to a case via TheHive API.

    :param alert_id: Alert identifier.
    :return: Response object containing the created TheHive case.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}/createCase".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'},
                             proxies=self.proxies, auth=self.auth,
                             verify=self.cert, data=json.dumps({}))
    except requests.exceptions.RequestException as the_exception:
        raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
    # Fix: removed the unreachable trailing ``return None`` — both the try
    # and except branches already leave the function.
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.get_case_template
|
python
|
def get_case_template(self, name):
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
|
:param name: Case template name
:return: TheHive case template
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L325-L348
|
[
"def And(*criteria):\n return {'_and': criteria}\n",
"def Eq(field, value):\n return {'_field': field, '_value': value}\n"
] |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
    """Update an existing case.

    :param case: The case to update. The case's ``id`` determines which case to update.
    :param fields: Optional list of attribute names to send; when empty,
        a default whitelist of updatable attributes is used.
    :return: Response object from the PATCH request.
    :raises CaseException: If the HTTP request fails.
    """
    req = self.url + "/api/case/{}".format(case.id)
    # Attributes TheHive accepts on a case update; everything else is read-only.
    update_keys = [
        'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
        'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
    ]
    data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception must be bound with ``as e`` — the original
        # referenced an undefined name ``e`` here, raising NameError instead
        # of the intended CaseException.
        raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
    """Create a log entry on a task, optionally uploading a file attachment.

    :param task_id: Task identifier.
    :param case_task_log: CaseTaskLog defined in models.py; its ``file``
        attribute, when set, names a local file to upload with the message.
    :return: Response object from the POST request.
    :raises CaseTaskException: If the HTTP request fails.
    """
    req = self.url + "/api/case/task/{}/log".format(task_id)
    if case_task_log.file:
        # Bug fix: open the attachment in a ``with`` block so the file
        # handle is always closed; the original leaked an open file object.
        with open(case_task_log.file, 'rb') as attachment:
            mime_type = magic.Magic(mime=True).from_file(case_task_log.file)
            data = {'_json': json.dumps({"message": case_task_log.message})}
            f = {'attachment': (os.path.basename(case_task_log.file), attachment, mime_type)}
            try:
                return requests.post(req, data=data, files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseTaskException("Case task log create error: {}".format(e))
    else:
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message': case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
    """Mark an alert as read.

    :param alert_id: The ID of the alert to mark as read.
    :return: Response object from the POST request.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: bind the exception with ``as e``; the original raised
        # NameError because ``e`` was undefined in the handler.
        raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
    """Mark an alert as unread.

    :param alert_id: The ID of the alert to mark as unread.
    :return: Response object from the POST request.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: bind the exception with ``as e``; the original raised
        # NameError because ``e`` was undefined in the handler.
        raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
    """Update an alert.

    :param alert_id: The ID of the alert to update.
    :param alert: Alert object carrying the new attribute values.
    :param fields: Optional list of attribute names to send; when empty,
        a default whitelist of writable attributes is used.
    :return: Response object from the PATCH request.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}".format(alert_id)
    # Update only the alert attributes that are not read-only.
    update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
    data = {k: v for k, v in alert.__dict__.items() if
            (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    if hasattr(alert, 'artifacts'):
        data['artifacts'] = [a.__dict__ for a in alert.artifacts]
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: bind the exception with ``as e``; the original raised
        # NameError because ``e`` was undefined in the handler.
        raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
    """Promote an alert to a case via TheHive API.

    :param alert_id: Alert identifier.
    :return: Response object containing the created TheHive case.
    :raises AlertException: If the HTTP request fails.
    """
    req = self.url + "/api/alert/{}/createCase".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'},
                             proxies=self.proxies, auth=self.auth,
                             verify=self.cert, data=json.dumps({}))
    except requests.exceptions.RequestException as the_exception:
        raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
    # Fix: removed the unreachable trailing ``return None`` — both the try
    # and except branches already leave the function.
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.get_task_logs
|
python
|
def get_task_logs(self, taskId):
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
|
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L350-L363
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
    """Update an existing case.

    :param case: The case to update. The case's ``id`` determines which case to update.
    :param fields: Optional list of attribute names to send; when empty,
        a default whitelist of updatable attributes is used.
    :return: Response object from the PATCH request.
    :raises CaseException: If the HTTP request fails.
    """
    req = self.url + "/api/case/{}".format(case.id)
    # Attributes TheHive accepts on a case update; everything else is read-only.
    update_keys = [
        'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
        'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
    ]
    data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception must be bound with ``as e`` — the original
        # referenced an undefined name ``e`` here, raising NameError instead
        # of the intended CaseException.
        raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.create_alert
|
python
|
def create_alert(self, alert):
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
|
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L365-L379
|
[
"def jsonify(self):\n return json.dumps(self, sort_keys=True, indent=4, cls=CustomJsonEncoder)\n"
] |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.mark_alert_as_read
|
python
|
def mark_alert_as_read(self, alert_id):
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
|
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L381-L392
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
    """
    List the tasks attached to a case.

    :param case_id: Case identifier
    :return: The Response returned by requests; its JSON body is the list of tasks
    :rtype: Response object
    """
    req = self.url + "/api/case/task/_search"
    # Add range and sort parameters
    params = {
        "range": attributes.get("range", "all"),
        "sort": attributes.get("sort", [])
    }
    # Add body: restrict to tasks whose parent is the given case
    parent_criteria = Parent('case', Id(case_id))
    # Append the custom query if specified
    if "query" in attributes:
        criteria = And(parent_criteria, attributes["query"])
    else:
        criteria = parent_criteria
    data = {
        "query": criteria
    }
    try:
        return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
    """
    Fetch the cases linked to a given case.

    :param case_id: Case identifier
    :return: TheHive case(s)
    :rtype: json
    """
    endpoint = "{}/api/case/{}/links".format(self.url, case_id)
    try:
        return requests.get(endpoint, proxies=self.proxies,
                            auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseException("Linked cases fetch error: {}".format(err))
def find_case_templates(self, **attributes):
    """
    Search for case templates; accepts the same ``query``/``range``/``sort``
    keyword arguments as the other ``find_*`` methods.

    :return: list of case templates
    :rtype: json
    """
    return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
    """
    Fetch a case template by name.

    :param name: Case template name
    :return: TheHive case template
    :rtype: json
    """
    endpoint = self.url + "/api/case/template/_search"
    # Only templates in the "Ok" status with a matching name are considered.
    body = {
        "query": And(Eq("name", name), Eq("status", "Ok"))
    }
    try:
        response = requests.post(endpoint, json=body, proxies=self.proxies,
                                 auth=self.auth, verify=self.cert)
        json_response = response.json()
        if response.status_code != 200 or len(json_response) == 0:
            raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
        return json_response[0]
    except requests.exceptions.RequestException as err:
        raise CaseTemplateException("Case template fetch error: {}".format(err))
def get_task_logs(self, taskId):
    """
    Fetch the logs attached to a task.

    :param taskId: Task identifier
    :return: TheHive logs
    :rtype: json
    """
    endpoint = "{}/api/case/task/{}/log".format(self.url, taskId)
    try:
        return requests.get(endpoint, proxies=self.proxies,
                            auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise CaseTaskException("Case task logs search error: {}".format(err))
def create_alert(self, alert):
    """
    Create a new alert.

    :param alert: TheHive alert
    :type alert: Alert defined in models.py
    :return: TheHive alert
    :rtype: json
    """
    endpoint = self.url + "/api/alert"
    payload = alert.jsonify()
    try:
        return requests.post(endpoint,
                             headers={'Content-Type': 'application/json'},
                             data=payload, proxies=self.proxies,
                             auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise AlertException("Alert create error: {}".format(err))
def mark_alert_as_unread(self, alert_id):
    """
    Mark an alert as unread.

    :param alert_id: The ID of the alert to mark as unread.
    :return: The Response returned by requests.
    """
    req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was previously caught without ``as e``,
        # so formatting the message below raised a NameError instead of
        # the intended AlertException.
        raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
    """
    Update an alert.

    :param alert_id: The ID of the alert to update.
    :param alert: The alert to update.
    :param fields: Optional parameter, an array of fields names, the ones we want to update
    :return: The Response returned by requests.
    """
    req = self.url + "/api/alert/{}".format(alert_id)
    # Update only the alert attributes that are not read-only server side.
    # When ``fields`` is given it overrides this whitelist entirely.
    update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
    data = {k: v for k, v in alert.__dict__.items() if
            (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    if hasattr(alert, 'artifacts'):
        data['artifacts'] = [a.__dict__ for a in alert.artifacts]
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was previously caught without ``as e``,
        # so formatting the message below raised a NameError instead of
        # the intended AlertException.
        raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
    """
    Fetch a single alert by its identifier.

    :param alert_id: Alert identifier
    :return: TheHive Alert
    :rtype: json
    """
    endpoint = "{}/api/alert/{}".format(self.url, alert_id)
    try:
        return requests.get(endpoint, proxies=self.proxies,
                            auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise AlertException("Alert fetch error: {}".format(err))
def find_alerts(self, **attributes):
    """
    Search for alerts; accepts the same ``query``/``range``/``sort``
    keyword arguments as the other ``find_*`` methods.

    :return: list of Alerts
    :rtype: json
    """
    return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
    """
    This uses the TheHiveAPI to promote an alert to a case

    :param alert_id: Alert identifier
    :return: TheHive Case
    :rtype: json
    """
    req = self.url + "/api/alert/{}/createCase".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'},
                             proxies=self.proxies, auth=self.auth,
                             verify=self.cert, data=json.dumps({}))
    except requests.exceptions.RequestException as the_exception:
        raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
    # Bug fix: a trailing ``return None`` was removed — it was unreachable
    # because the try block returns and the except block raises.
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
    """
    Submit a Cortex analyzer job for an artifact.

    :param cortex_id: identifier of the Cortex server
    :param artifact_id: identifier of the artifact as found with an artifact search
    :param analyzer_id: name of the analyzer used by the job
    :rtype: json
    """
    endpoint = self.url + "/api/connector/cortex/job"
    payload = json.dumps({
        "cortexId": cortex_id,
        "artifactId": artifact_id,
        "analyzerId": analyzer_id
    })
    try:
        return requests.post(endpoint,
                             headers={'Content-Type': 'application/json'},
                             data=payload, proxies=self.proxies,
                             auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as err:
        raise TheHiveException("Analyzer run error: {}".format(err))
def find_tasks(self, **attributes):
    """
    Search for tasks; accepts the same ``query``/``range``/``sort``
    keyword arguments as the other ``find_*`` methods.

    :return: list of Tasks
    :rtype: json
    """
    return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.update_alert
|
python
|
def update_alert(self, alert_id, alert, fields=[]):
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
|
Update an alert.
:param alert_id: The ID of the alert to update.
:param alert: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L407-L428
| null |
class TheHiveApi:
    """
    Python API for TheHive

    :param url: thehive URL
    :param principal: The username or the API key
    :param password: The password for basic authentication or None. Defaults to None
    :param proxies: Optional ``requests``-style proxy mapping.
    :param cert: Forwarded to ``requests`` as ``verify`` (CA bundle path or bool).
    """

    def __init__(self, url, principal, password=None, proxies={}, cert=True):
        self.url = url
        self.principal = principal
        self.password = password
        self.proxies = proxies
        # Basic auth when a password is supplied, bearer (API key) auth otherwise.
        if self.password is not None:
            self.auth = requests.auth.HTTPBasicAuth(self.principal, self.password)
        else:
            self.auth = BearerAuth(self.principal)
        self.cert = cert
        # Create a CaseHelper instance
        self.case = CaseHelper(self)

    def __find_rows(self, find_url, **attributes):
        """
        Generic search helper shared by the ``find_*`` methods.

        :param find_url: URL of the find api
        :type find_url: string
        :return: The Response returned by requests including the list of documents based on find_url
        :rtype: Response object
        """
        req = self.url + find_url
        # Range and sort travel as query-string parameters; the query itself
        # goes in the JSON body.
        params = {
            "range": attributes.get("range", "all"),
            "sort": attributes.get("sort", [])
        }
        data = {
            "query": attributes.get("query", {})
        }
        try:
            return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise TheHiveException("Error: {}".format(e))

    def do_patch(self, api_url, **attributes):
        """PATCH ``api_url`` with ``attributes`` as the JSON body."""
        return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
                              proxies=self.proxies, auth=self.auth, verify=self.cert)

    def create_case(self, case):
        """
        :param case: The case details
        :type case: Case defined in models.py
        :return: TheHive case
        :rtype: json
        """
        req = self.url + "/api/case"
        data = case.jsonify()
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseException("Case create error: {}".format(e))

    def update_case(self, case, fields=[]):
        """
        Update a case.

        :param case: The case to update. The case's `id` determines which case to update.
        :param fields: Optional parameter, an array of fields names, the ones we want to update
        :return:
        """
        req = self.url + "/api/case/{}".format(case.id)
        # Send only writable attributes unless the caller picked fields.
        update_keys = [
            'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
            'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
        ]
        data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
        try:
            return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            # Bug fix: the exception was caught without ``as e``, so the
            # message below raised a NameError instead of CaseException.
            raise CaseException("Case update error: {}".format(e))

    def create_case_task(self, case_id, case_task):
        """
        :param case_id: Case identifier
        :param case_task: TheHive task
        :type case_task: CaseTask defined in models.py
        :return: TheHive task
        :rtype: json
        """
        req = self.url + "/api/case/{}/task".format(case_id)
        data = case_task.jsonify()
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task create error: {}".format(e))

    def update_case_task(self, task):
        """
        Updates TheHive Task.

        :param task: The task to update. The task's `id` determines which Task to update.
        :return:
        """
        req = self.url + "/api/case/task/{}".format(task.id)
        # Send only writable attributes.
        update_keys = [
            'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
        ]
        data = {k: v for k, v in task.__dict__.items() if k in update_keys}
        try:
            return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
                                  proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task update error: {}".format(e))

    def create_task_log(self, task_id, case_task_log):
        """
        :param task_id: Task identifier
        :param case_task_log: TheHive log
        :type case_task_log: CaseTaskLog defined in models.py
        :return: TheHive log
        :rtype: json
        """
        req = self.url + "/api/case/task/{}/log".format(task_id)
        data = {'_json': json.dumps({"message": case_task_log.message})}
        if case_task_log.file:
            # Multipart upload: the attachment's MIME type is sniffed with
            # python-magic. NOTE(review): the file handle opened here is never
            # explicitly closed — confirm whether that is acceptable upstream.
            f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
            try:
                return requests.post(req, data=data, files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseTaskException("Case task log create error: {}".format(e))
        else:
            try:
                return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message': case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseTaskException("Case task log create error: {}".format(e))

    def create_case_observable(self, case_id, case_observable):
        """
        :param case_id: Case identifier
        :param case_observable: TheHive observable
        :type case_observable: CaseObservable defined in models.py
        :return: TheHive observable
        :rtype: json
        """
        req = self.url + "/api/case/{}/artifact".format(case_id)
        if case_observable.dataType == 'file':
            # File observables go up as multipart/form-data with the JSON
            # metadata in the ``_json`` field.
            try:
                mesg = json.dumps({"dataType": case_observable.dataType,
                                   "message": case_observable.message,
                                   "tlp": case_observable.tlp,
                                   "tags": case_observable.tags,
                                   "ioc": case_observable.ioc
                                   })
                data = {"_json": mesg}
                return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseObservableException("Case observable create error: {}".format(e))
        else:
            try:
                return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
            except requests.exceptions.RequestException as e:
                raise CaseObservableException("Case observable create error: {}".format(e))

    def get_case(self, case_id):
        """
        :param case_id: Case identifier
        :return: TheHive case
        :rtype: json
        """
        req = self.url + "/api/case/{}".format(case_id)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseException("Case fetch error: {}".format(e))

    def find_cases(self, **attributes):
        """
        Search for cases; accepts ``query``/``range``/``sort`` keyword
        arguments.

        :return: Response whose JSON body is a list of cases
        :rtype: Response object
        """
        return self.__find_rows("/api/case/_search", **attributes)

    def find_first(self, **attributes):
        """
        :return: first case of result set given by query
        :rtype: dict
        """
        # Raises IndexError when the search returns no case.
        return self.find_cases(**attributes).json()[0]

    def get_case_observables(self, case_id, **attributes):
        """
        :param case_id: Case identifier
        :return: list of observables
        :rtype: json
        """
        req = self.url + "/api/case/artifact/_search"
        # Add range and sort parameters
        params = {
            "range": attributes.get("range", "all"),
            "sort": attributes.get("sort", [])
        }
        # Restrict to artifacts whose parent is the given case.
        parent_criteria = Parent('case', Id(case_id))
        # Append the custom query if specified
        if "query" in attributes:
            criteria = And(parent_criteria, attributes["query"])
        else:
            criteria = parent_criteria
        data = {
            "query": criteria
        }
        try:
            return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseObservableException("Case observables search error: {}".format(e))

    def get_case_tasks(self, case_id, **attributes):
        """
        List the tasks attached to a case.

        :param case_id: Case identifier
        :return: Response whose JSON body is the list of tasks
        :rtype: Response object
        """
        req = self.url + "/api/case/task/_search"
        # Add range and sort parameters
        params = {
            "range": attributes.get("range", "all"),
            "sort": attributes.get("sort", [])
        }
        # Restrict to tasks whose parent is the given case.
        parent_criteria = Parent('case', Id(case_id))
        # Append the custom query if specified
        if "query" in attributes:
            criteria = And(parent_criteria, attributes["query"])
        else:
            criteria = parent_criteria
        data = {
            "query": criteria
        }
        try:
            return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case tasks search error: {}".format(e))

    def get_linked_cases(self, case_id):
        """
        :param case_id: Case identifier
        :return: TheHive case(s)
        :rtype: json
        """
        req = self.url + "/api/case/{}/links".format(case_id)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseException("Linked cases fetch error: {}".format(e))

    def find_case_templates(self, **attributes):
        """
        :return: list of case templates
        :rtype: json
        """
        return self.__find_rows("/api/case/template/_search", **attributes)

    def get_case_template(self, name):
        """
        :param name: Case template name
        :return: TheHive case template
        :rtype: json
        """
        req = self.url + "/api/case/template/_search"
        data = {
            "query": And(Eq("name", name), Eq("status", "Ok"))
        }
        try:
            response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
            json_response = response.json()
            if response.status_code == 200 and len(json_response) > 0:
                # Reuse the already-parsed body instead of parsing it twice.
                return json_response[0]
            else:
                raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
        except requests.exceptions.RequestException as e:
            raise CaseTemplateException("Case template fetch error: {}".format(e))

    def get_task_logs(self, taskId):
        """
        :param taskId: Task identifier
        :return: TheHive logs
        :rtype: json
        """
        req = self.url + "/api/case/task/{}/log".format(taskId)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise CaseTaskException("Case task logs search error: {}".format(e))

    def create_alert(self, alert):
        """
        :param alert: TheHive alert
        :type alert: Alert defined in models.py
        :return: TheHive alert
        :rtype: json
        """
        req = self.url + "/api/alert"
        data = alert.jsonify()
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise AlertException("Alert create error: {}".format(e))

    def mark_alert_as_read(self, alert_id):
        """
        Mark an alert as read.

        :param alert_id: The ID of the alert to mark as read.
        :return:
        """
        req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            # Bug fix: the exception was caught without ``as e`` (NameError).
            raise AlertException("Mark alert as read error: {}".format(e))

    def mark_alert_as_unread(self, alert_id):
        """
        Mark an alert as unread.

        :param alert_id: The ID of the alert to mark as unread.
        :return:
        """
        req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            # Bug fix: the exception was caught without ``as e`` (NameError).
            raise AlertException("Mark alert as unread error: {}".format(e))

    def get_alert(self, alert_id):
        """
        :param alert_id: Alert identifier
        :return: TheHive Alert
        :rtype: json
        """
        req = self.url + "/api/alert/{}".format(alert_id)
        try:
            return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise AlertException("Alert fetch error: {}".format(e))

    def find_alerts(self, **attributes):
        """
        :return: list of Alerts
        :rtype: json
        """
        return self.__find_rows("/api/alert/_search", **attributes)

    def promote_alert_to_case(self, alert_id):
        """
        This uses the TheHiveAPI to promote an alert to a case

        :param alert_id: Alert identifier
        :return: TheHive Case
        :rtype: json
        """
        req = self.url + "/api/alert/{}/createCase".format(alert_id)
        try:
            return requests.post(req, headers={'Content-Type': 'application/json'},
                                 proxies=self.proxies, auth=self.auth,
                                 verify=self.cert, data=json.dumps({}))
        except requests.exceptions.RequestException as the_exception:
            raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
        # Bug fix: an unreachable trailing ``return None`` was removed.

    def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
        """
        :param cortex_id: identifier of the Cortex server
        :param artifact_id: identifier of the artifact as found with an artifact search
        :param analyzer_id: name of the analyzer used by the job
        :rtype: json
        """
        req = self.url + "/api/connector/cortex/job"
        try:
            data = json.dumps({"cortexId": cortex_id,
                               "artifactId": artifact_id,
                               "analyzerId": analyzer_id
                               })
            return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
        except requests.exceptions.RequestException as e:
            raise TheHiveException("Analyzer run error: {}".format(e))

    def find_tasks(self, **attributes):
        """
        :return: list of Tasks
        :rtype: json
        """
        return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.get_alert
|
python
|
def get_alert(self, alert_id):
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
|
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L430-L441
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
    """
    Update a case.

    :param case: The case to update. The case's `id` determines which case to update.
    :param fields: Optional parameter, an array of fields names, the ones we want to update
    :return: The Response returned by requests.
    """
    req = self.url + "/api/case/{}".format(case.id)
    # Send only writable attributes unless the caller picked fields.
    update_keys = [
        'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
        'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
    ]
    data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
    try:
        return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was previously caught without ``as e``, so
        # the message below raised a NameError instead of CaseException.
        raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
    """
    Mark an alert as read.

    :param alert_id: The ID of the alert to mark as read.
    :return: The Response returned by requests.
    """
    req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was previously caught without ``as e``, so
        # the message below raised a NameError instead of AlertException.
        raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
    """
    Mark an alert as unread.

    :param alert_id: The ID of the alert to mark as unread.
    :return: The Response returned by requests.
    """
    req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
    try:
        return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
    except requests.exceptions.RequestException as e:
        # Bug fix: the exception was previously caught without ``as e``, so
        # the message below raised a NameError instead of AlertException.
        raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.promote_alert_to_case
|
python
|
def promote_alert_to_case(self, alert_id):
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
|
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L451-L470
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
"""
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
"""
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
TheHive-Project/TheHive4py
|
thehive4py/api.py
|
TheHiveApi.run_analyzer
|
python
|
def run_analyzer(self, cortex_id, artifact_id, analyzer_id):
req = self.url + "/api/connector/cortex/job"
try:
data = json.dumps({ "cortexId": cortex_id,
"artifactId": artifact_id,
"analyzerId": analyzer_id
})
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Analyzer run error: {}".format(e))
|
:param cortex_id: identifier of the Cortex server
:param artifact_id: identifier of the artifact as found with an artifact search
:param analyzer_id: name of the analyzer used by the job
:rtype: json
|
train
|
https://github.com/TheHive-Project/TheHive4py/blob/35762bbd50d8376943268464326b59c752d6241b/thehive4py/api.py#L472-L490
| null |
class TheHiveApi:
"""
Python API for TheHive
:param url: thehive URL
:param principal: The username or the API key
:param password: The password for basic authentication or None. Defaults to None
"""
def __init__(self, url, principal, password=None, proxies={}, cert=True):
self.url = url
self.principal = principal
self.password = password
self.proxies = proxies
if self.password is not None:
self.auth = requests.auth.HTTPBasicAuth(self.principal,self.password)
else:
self.auth = BearerAuth(self.principal)
self.cert = cert
# Create a CaseHelper instance
self.case = CaseHelper(self)
def __find_rows(self, find_url, **attributes):
"""
:param find_url: URL of the find api
:type find_url: string
:return: The Response returned by requests including the list of documents based on find_url
:rtype: Response object
"""
req = self.url + find_url
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
data = {
"query": attributes.get("query", {})
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise TheHiveException("Error: {}".format(e))
def do_patch(self, api_url, **attributes):
return requests.patch(self.url + api_url, headers={'Content-Type': 'application/json'}, json=attributes,
proxies=self.proxies, auth=self.auth, verify=self.cert)
def create_case(self, case):
"""
:param case: The case details
:type case: Case defined in models.py
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case"
data = case.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case create error: {}".format(e))
def update_case(self, case, fields=[]):
"""
Update a case.
:param case: The case to update. The case's `id` determines which case to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/case/{}".format(case.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'severity', 'startDate', 'owner', 'flag', 'tlp', 'tags', 'status', 'resolutionStatus',
'impactStatus', 'summary', 'endDate', 'metrics', 'customFields'
]
data = {k: v for k, v in case.__dict__.items() if (len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise CaseException("Case update error: {}".format(e))
def create_case_task(self, case_id, case_task):
"""
:param case_id: Case identifier
:param case_task: TheHive task
:type case_task: CaseTask defined in models.py
:return: TheHive task
:rtype: json
"""
req = self.url + "/api/case/{}/task".format(case_id)
data = case_task.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task create error: {}".format(e))
def update_case_task(self, task):
"""
:Updates TheHive Task
:param case: The task to update. The task's `id` determines which Task to update.
:return:
"""
req = self.url + "/api/case/task/{}".format(task.id)
# Choose which attributes to send
update_keys = [
'title', 'description', 'status', 'order', 'user', 'owner', 'flag', 'endDate'
]
data = {k: v for k, v in task.__dict__.items() if k in update_keys}
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data,
proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task update error: {}".format(e))
def create_task_log(self, task_id, case_task_log):
"""
:param task_id: Task identifier
:param case_task_log: TheHive log
:type case_task_log: CaseTaskLog defined in models.py
:return: TheHive log
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(task_id)
data = {'_json': json.dumps({"message":case_task_log.message})}
if case_task_log.file:
f = {'attachment': (os.path.basename(case_task_log.file), open(case_task_log.file, 'rb'), magic.Magic(mime=True).from_file(case_task_log.file))}
try:
return requests.post(req, data=data,files=f, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=json.dumps({'message':case_task_log.message}), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task log create error: {}".format(e))
def create_case_observable(self, case_id, case_observable):
"""
:param case_id: Case identifier
:param case_observable: TheHive observable
:type case_observable: CaseObservable defined in models.py
:return: TheHive observable
:rtype: json
"""
req = self.url + "/api/case/{}/artifact".format(case_id)
if case_observable.dataType == 'file':
try:
mesg = json.dumps({ "dataType": case_observable.dataType,
"message": case_observable.message,
"tlp": case_observable.tlp,
"tags": case_observable.tags,
"ioc": case_observable.ioc
})
data = {"_json": mesg}
return requests.post(req, data=data, files=case_observable.data[0], proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
else:
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=case_observable.jsonify(), proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observable create error: {}".format(e))
def get_case(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case
:rtype: json
"""
req = self.url + "/api/case/{}".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Case fetch error: {}".format(e))
def find_cases(self, **attributes):
return self.__find_rows("/api/case/_search", **attributes)
def find_first(self, **attributes):
"""
:return: first case of result set given by query
:rtype: dict
"""
return self.find_cases(**attributes).json()[0]
def get_case_observables(self, case_id, **attributes):
"""
:param case_id: Case identifier
:return: list of observables
;rtype: json
"""
req = self.url + "/api/case/artifact/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseObservableException("Case observables search error: {}".format(e))
def get_case_tasks(self, case_id, **attributes):
req = self.url + "/api/case/task/_search"
# Add range and sort parameters
params = {
"range": attributes.get("range", "all"),
"sort": attributes.get("sort", [])
}
# Add body
parent_criteria = Parent('case', Id(case_id))
# Append the custom query if specified
if "query" in attributes:
criteria = And(parent_criteria, attributes["query"])
else:
criteria = parent_criteria
data = {
"query": criteria
}
try:
return requests.post(req, params=params, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case tasks search error: {}".format(e))
def get_linked_cases(self, case_id):
"""
:param case_id: Case identifier
:return: TheHive case(s)
:rtype: json
"""
req = self.url + "/api/case/{}/links".format(case_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseException("Linked cases fetch error: {}".format(e))
def find_case_templates(self, **attributes):
"""
:return: list of case templates
:rtype: json
"""
return self.__find_rows("/api/case/template/_search", **attributes)
def get_case_template(self, name):
"""
:param name: Case template name
:return: TheHive case template
:rtype: json
"""
req = self.url + "/api/case/template/_search"
data = {
"query": And(Eq("name", name), Eq("status", "Ok"))
}
try:
response = requests.post(req, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
json_response = response.json()
if response.status_code == 200 and len(json_response) > 0:
return response.json()[0]
else:
raise CaseTemplateException("Case template fetch error: Unable to find case template {}".format(name))
except requests.exceptions.RequestException as e:
raise CaseTemplateException("Case template fetch error: {}".format(e))
def get_task_logs(self, taskId):
"""
:param taskId: Task identifier
:type caseTaskLog: CaseTaskLog defined in models.py
:return: TheHive logs
:rtype: json
"""
req = self.url + "/api/case/task/{}/log".format(taskId)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise CaseTaskException("Case task logs search error: {}".format(e))
def create_alert(self, alert):
"""
:param alert: TheHive alert
:type alert: Alert defined in models.py
:return: TheHive alert
:rtype: json
"""
req = self.url + "/api/alert"
data = alert.jsonify()
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, data=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert create error: {}".format(e))
def mark_alert_as_read(self, alert_id):
"""
Mark an alert as read.
:param alert_id: The ID of the alert to mark as read.
:return:
"""
req = self.url + "/api/alert/{}/markAsRead".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as read error: {}".format(e))
def mark_alert_as_unread(self, alert_id):
"""
Mark an alert as unread.
:param alert_id: The ID of the alert to mark as unread.
:return:
"""
req = self.url + "/api/alert/{}/markAsUnread".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'}, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Mark alert as unread error: {}".format(e))
def update_alert(self, alert_id, alert, fields=[]):
"""
Update an alert.
:param alert_id: The ID of the alert to update.
:param data: The alert to update.
:param fields: Optional parameter, an array of fields names, the ones we want to update
:return:
"""
req = self.url + "/api/alert/{}".format(alert_id)
# update only the alert attributes that are not read-only
update_keys = ['tlp', 'severity', 'tags', 'caseTemplate', 'title', 'description']
data = {k: v for k, v in alert.__dict__.items() if
(len(fields) > 0 and k in fields) or (len(fields) == 0 and k in update_keys)}
if hasattr(alert, 'artifacts'):
data['artifacts'] = [a.__dict__ for a in alert.artifacts]
try:
return requests.patch(req, headers={'Content-Type': 'application/json'}, json=data, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException:
raise AlertException("Alert update error: {}".format(e))
def get_alert(self, alert_id):
"""
:param alert_id: Alert identifier
:return: TheHive Alert
:rtype: json
"""
req = self.url + "/api/alert/{}".format(alert_id)
try:
return requests.get(req, proxies=self.proxies, auth=self.auth, verify=self.cert)
except requests.exceptions.RequestException as e:
raise AlertException("Alert fetch error: {}".format(e))
def find_alerts(self, **attributes):
"""
:return: list of Alerts
:rtype: json
"""
return self.__find_rows("/api/alert/_search", **attributes)
def promote_alert_to_case(self, alert_id):
"""
This uses the TheHiveAPI to promote an alert to a case
:param alert_id: Alert identifier
:return: TheHive Case
:rtype: json
"""
req = self.url + "/api/alert/{}/createCase".format(alert_id)
try:
return requests.post(req, headers={'Content-Type': 'application/json'},
proxies=self.proxies, auth=self.auth,
verify=self.cert, data=json.dumps({}))
except requests.exceptions.RequestException as the_exception:
raise AlertException("Couldn't promote alert to case: {}".format(the_exception))
return None
def find_tasks(self, **attributes):
"""
:return: list of Tasks
:rtype: json
"""
return self.__find_rows("/api/case/task/_search", **attributes)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
i2c_msg.read
|
python
|
def read(address, length):
arr = create_string_buffer(length)
return i2c_msg(
addr=address, flags=I2C_M_RD, len=length,
buf=arr)
|
Prepares an i2c read transaction.
:param address: Slave address.
:type: address: int
:param length: Number of bytes to read.
:type: length: int
:return: New :py:class:`i2c_msg` instance for read operation.
:rtype: :py:class:`i2c_msg`
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L158-L172
| null |
class i2c_msg(Structure):
"""
As defined in ``i2c.h``.
"""
_fields_ = [
('addr', c_uint16),
('flags', c_uint16),
('len', c_uint16),
('buf', POINTER(c_char))]
def __iter__(self):
return i2c_msg_iter(self)
def __len__(self):
return self.len
def __bytes__(self):
return string_at(self.buf, self.len)
def __repr__(self):
return 'i2c_msg(%d,%d,%r)' % (self.addr, self.flags, self.__bytes__())
def __str__(self):
s = self.__bytes__()
if sys.version_info.major >= 3:
s = ''.join(map(chr, s))
return s
@staticmethod
@staticmethod
def write(address, buf):
"""
Prepares an i2c write transaction.
:param address: Slave address.
:type address: int
:param buf: Bytes to write. Either list of values or str.
:type buf: list
:return: New :py:class:`i2c_msg` instance for write operation.
:rtype: :py:class:`i2c_msg`
"""
if sys.version_info.major >= 3:
if type(buf) is str:
buf = bytes(map(ord, buf))
else:
buf = bytes(buf)
else:
if type(buf) is not str:
buf = ''.join([chr(x) for x in buf])
arr = create_string_buffer(buf, len(buf))
return i2c_msg(
addr=address, flags=0, len=len(arr),
buf=arr)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
i2c_msg.write
|
python
|
def write(address, buf):
if sys.version_info.major >= 3:
if type(buf) is str:
buf = bytes(map(ord, buf))
else:
buf = bytes(buf)
else:
if type(buf) is not str:
buf = ''.join([chr(x) for x in buf])
arr = create_string_buffer(buf, len(buf))
return i2c_msg(
addr=address, flags=0, len=len(arr),
buf=arr)
|
Prepares an i2c write transaction.
:param address: Slave address.
:type address: int
:param buf: Bytes to write. Either list of values or str.
:type buf: list
:return: New :py:class:`i2c_msg` instance for write operation.
:rtype: :py:class:`i2c_msg`
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L175-L197
| null |
class i2c_msg(Structure):
"""
As defined in ``i2c.h``.
"""
_fields_ = [
('addr', c_uint16),
('flags', c_uint16),
('len', c_uint16),
('buf', POINTER(c_char))]
def __iter__(self):
return i2c_msg_iter(self)
def __len__(self):
return self.len
def __bytes__(self):
return string_at(self.buf, self.len)
def __repr__(self):
return 'i2c_msg(%d,%d,%r)' % (self.addr, self.flags, self.__bytes__())
def __str__(self):
s = self.__bytes__()
if sys.version_info.major >= 3:
s = ''.join(map(chr, s))
return s
@staticmethod
def read(address, length):
"""
Prepares an i2c read transaction.
:param address: Slave address.
:type: address: int
:param length: Number of bytes to read.
:type: length: int
:return: New :py:class:`i2c_msg` instance for read operation.
:rtype: :py:class:`i2c_msg`
"""
arr = create_string_buffer(length)
return i2c_msg(
addr=address, flags=I2C_M_RD, len=length,
buf=arr)
@staticmethod
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
i2c_rdwr_ioctl_data.create
|
python
|
def create(*i2c_msg_instances):
n_msg = len(i2c_msg_instances)
msg_array = (i2c_msg * n_msg)(*i2c_msg_instances)
return i2c_rdwr_ioctl_data(
msgs=msg_array,
nmsgs=n_msg
)
|
Factory method for creating a i2c_rdwr_ioctl_data struct that can
be called with ``ioctl(fd, I2C_RDWR, data)``.
:param i2c_msg_instances: Up to 42 i2c_msg instances
:rtype: i2c_rdwr_ioctl_data
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L211-L224
| null |
class i2c_rdwr_ioctl_data(Structure):
"""
As defined in ``i2c-dev.h``.
"""
_fields_ = [
('msgs', POINTER(i2c_msg)),
('nmsgs', c_uint32)
]
__slots__ = [name for name, type in _fields_]
@staticmethod
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus.open
|
python
|
def open(self, bus):
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
|
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L274-L282
|
[
"def _get_funcs(self):\n \"\"\"\n Returns a 32-bit value stating supported I2C functions.\n\n :rtype: int\n \"\"\"\n f = c_uint32()\n ioctl(self.fd, I2C_FUNCS, f)\n return f.value\n"
] |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus.close
|
python
|
def close(self):
if self.fd:
os.close(self.fd)
self.fd = None
|
Close the i2c connection.
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L284-L290
| null |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus._set_address
|
python
|
def _set_address(self, address, force=None):
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
|
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L292-L308
| null |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus._get_funcs
|
python
|
def _get_funcs(self):
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
|
Returns a 32-bit value stating supported I2C functions.
:rtype: int
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L310-L318
| null |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus.write_quick
|
python
|
def write_quick(self, i2c_addr, force=None):
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
|
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L320-L331
|
[
"def create(read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE_DATA):\n u = union_i2c_smbus_data()\n return i2c_smbus_ioctl_data(\n read_write=read_write, command=command, size=size,\n data=union_pointer_type(u))\n",
"def _set_address(self, address, force=None):\n \"\"\"\n Set i2c slave address to use for subsequent calls.\n\n :param address:\n :type address: int\n :param force:\n :type force: Boolean\n \"\"\"\n force = force if force is not None else self.force\n if self.address != address or self._force_last != force:\n if force is True:\n ioctl(self.fd, I2C_SLAVE_FORCE, address)\n else:\n ioctl(self.fd, I2C_SLAVE, address)\n self.address = address\n self._force_last = force\n"
] |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus.read_byte
|
python
|
def read_byte(self, i2c_addr, force=None):
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
|
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L333-L349
|
[
"def create(read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE_DATA):\n u = union_i2c_smbus_data()\n return i2c_smbus_ioctl_data(\n read_write=read_write, command=command, size=size,\n data=union_pointer_type(u))\n",
"def _set_address(self, address, force=None):\n \"\"\"\n Set i2c slave address to use for subsequent calls.\n\n :param address:\n :type address: int\n :param force:\n :type force: Boolean\n \"\"\"\n force = force if force is not None else self.force\n if self.address != address or self._force_last != force:\n if force is True:\n ioctl(self.fd, I2C_SLAVE_FORCE, address)\n else:\n ioctl(self.fd, I2C_SLAVE, address)\n self.address = address\n self._force_last = force\n"
] |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus.write_byte
|
python
|
def write_byte(self, i2c_addr, value, force=None):
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
|
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L351-L366
|
[
"def create(read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE_DATA):\n u = union_i2c_smbus_data()\n return i2c_smbus_ioctl_data(\n read_write=read_write, command=command, size=size,\n data=union_pointer_type(u))\n",
"def _set_address(self, address, force=None):\n \"\"\"\n Set i2c slave address to use for subsequent calls.\n\n :param address:\n :type address: int\n :param force:\n :type force: Boolean\n \"\"\"\n force = force if force is not None else self.force\n if self.address != address or self._force_last != force:\n if force is True:\n ioctl(self.fd, I2C_SLAVE_FORCE, address)\n else:\n ioctl(self.fd, I2C_SLAVE, address)\n self.address = address\n self._force_last = force\n"
] |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus.read_byte_data
|
python
|
def read_byte_data(self, i2c_addr, register, force=None):
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
|
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L368-L386
|
[
"def create(read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE_DATA):\n u = union_i2c_smbus_data()\n return i2c_smbus_ioctl_data(\n read_write=read_write, command=command, size=size,\n data=union_pointer_type(u))\n",
"def _set_address(self, address, force=None):\n \"\"\"\n Set i2c slave address to use for subsequent calls.\n\n :param address:\n :type address: int\n :param force:\n :type force: Boolean\n \"\"\"\n force = force if force is not None else self.force\n if self.address != address or self._force_last != force:\n if force is True:\n ioctl(self.fd, I2C_SLAVE_FORCE, address)\n else:\n ioctl(self.fd, I2C_SLAVE, address)\n self.address = address\n self._force_last = force\n"
] |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def write_byte_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
kplindegaard/smbus2
|
smbus2/smbus2.py
|
SMBus.write_byte_data
|
python
|
def write_byte_data(self, i2c_addr, register, value, force=None):
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BYTE_DATA
)
msg.data.contents.byte = value
ioctl(self.fd, I2C_SMBUS, msg)
|
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Byte value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
|
train
|
https://github.com/kplindegaard/smbus2/blob/a1088a03438dba84c266b73ad61b0c06750d0961/smbus2/smbus2.py#L388-L407
|
[
"def create(read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE_DATA):\n u = union_i2c_smbus_data()\n return i2c_smbus_ioctl_data(\n read_write=read_write, command=command, size=size,\n data=union_pointer_type(u))\n",
"def _set_address(self, address, force=None):\n \"\"\"\n Set i2c slave address to use for subsequent calls.\n\n :param address:\n :type address: int\n :param force:\n :type force: Boolean\n \"\"\"\n force = force if force is not None else self.force\n if self.address != address or self._force_last != force:\n if force is True:\n ioctl(self.fd, I2C_SLAVE_FORCE, address)\n else:\n ioctl(self.fd, I2C_SLAVE, address)\n self.address = address\n self._force_last = force\n"
] |
class SMBus(object):
def __init__(self, bus=None, force=False):
"""
Initialize and (optionally) open an i2c bus connection.
:param bus: i2c bus number (e.g. 0 or 1). If not given, a subsequent
call to ``open()`` is required.
:type bus: int
:param force: force using the slave address even when driver is
already using it.
:type force: boolean
"""
self.fd = None
self.funcs = 0
if bus is not None:
self.open(bus)
self.address = None
self.force = force
self._force_last = None
def open(self, bus):
"""
Open a given i2c bus.
:param bus: i2c bus number (e.g. 0 or 1)
:type bus: int
"""
self.fd = os.open("/dev/i2c-{}".format(bus), os.O_RDWR)
self.funcs = self._get_funcs()
def close(self):
"""
Close the i2c connection.
"""
if self.fd:
os.close(self.fd)
self.fd = None
def _set_address(self, address, force=None):
"""
Set i2c slave address to use for subsequent calls.
:param address:
:type address: int
:param force:
:type force: Boolean
"""
force = force if force is not None else self.force
if self.address != address or self._force_last != force:
if force is True:
ioctl(self.fd, I2C_SLAVE_FORCE, address)
else:
ioctl(self.fd, I2C_SLAVE, address)
self.address = address
self._force_last = force
def _get_funcs(self):
"""
Returns a 32-bit value stating supported I2C functions.
:rtype: int
"""
f = c_uint32()
ioctl(self.fd, I2C_FUNCS, f)
return f.value
def write_quick(self, i2c_addr, force=None):
"""
Perform quick transaction. Throws IOError if unsuccessful.
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=0, size=I2C_SMBUS_QUICK)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte(self, i2c_addr, force=None):
"""
Read a single byte from a device.
:rtype: int
:param i2c_addr: i2c address
:type i2c_addr: int
:param force:
:type force: Boolean
:return: Read byte value
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def write_byte(self, i2c_addr, value, force=None):
"""
Write a single byte to a device.
:param i2c_addr: i2c address
:type i2c_addr: int
:param value: value to write
:type value: int
:param force:
:type force: Boolean
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE
)
ioctl(self.fd, I2C_SMBUS, msg)
def read_byte_data(self, i2c_addr, register, force=None):
"""
Read a single byte from a designated register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: Read byte value
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BYTE_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.byte
def read_word_data(self, i2c_addr, register, force=None):
"""
Read a single word (2 bytes) from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read
:type register: int
:param force:
:type force: Boolean
:return: 2-byte word
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_WORD_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def write_word_data(self, i2c_addr, register, value, force=None):
"""
Write a byte to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: None
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_WORD_DATA
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
def process_call(self, i2c_addr, register, value, force=None):
"""
Executes a SMBus Process Call, sending a 16-bit value and receiving a 16-bit response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param value: Word value to transmit
:type value: int
:param force:
:type force: Boolean
:rtype: int
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_PROC_CALL
)
msg.data.contents.word = value
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.word
def read_block_data(self, i2c_addr, register, force=None):
"""
Read a block of up to 32-bytes from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_BLOCK_DATA
)
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def write_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def block_process_call(self, i2c_addr, register, data, force=None):
"""
Executes a SMBus Block Process Call, sending a variable-size data block and receiving another variable-size response
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Register to read/write to
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_BLOCK_PROC_CALL
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
length = msg.data.contents.block[0]
return msg.data.contents.block[1:length + 1]
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
"""
Read a block of byte data from a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param length: Desired block length
:type length: int
:param force:
:type force: Boolean
:return: List of bytes
:rtype: list
"""
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_READ, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.byte = length
ioctl(self.fd, I2C_SMBUS, msg)
return msg.data.contents.block[1:length + 1]
def write_i2c_block_data(self, i2c_addr, register, data, force=None):
"""
Write a block of byte data to a given register.
:param i2c_addr: i2c address
:type i2c_addr: int
:param register: Start register
:type register: int
:param data: List of bytes
:type data: list
:param force:
:type force: Boolean
:rtype: None
"""
length = len(data)
if length > I2C_SMBUS_BLOCK_MAX:
raise ValueError("Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
self._set_address(i2c_addr, force=force)
msg = i2c_smbus_ioctl_data.create(
read_write=I2C_SMBUS_WRITE, command=register, size=I2C_SMBUS_I2C_BLOCK_DATA
)
msg.data.contents.block[0] = length
msg.data.contents.block[1:length + 1] = data
ioctl(self.fd, I2C_SMBUS, msg)
def i2c_rdwr(self, *i2c_msgs):
"""
Combine a series of i2c read and write operations in a single
transaction (with repeated start bits but no stop bits in between).
This method takes i2c_msg instances as input, which must be created
first with :py:meth:`i2c_msg.read` or :py:meth:`i2c_msg.write`.
:param i2c_msgs: One or more i2c_msg class instances.
:type i2c_msgs: i2c_msg
:rtype: None
"""
ioctl_data = i2c_rdwr_ioctl_data.create(*i2c_msgs)
ioctl(self.fd, I2C_RDWR, ioctl_data)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.