repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
thumbor/thumbor | thumbor/filters/frame.py | Filter.handle_padding | python | def handle_padding(self, padding):
'''Pads the image with transparent pixels if necessary.'''
left = padding[0]
top = padding[1]
right = padding[2]
bottom = padding[3]
offset_x = 0
offset_y = 0
new_width = self.engine.size[0]
new_height = self.engine.size[1]
if left > 0:
offset_x = left
new_width += left
if top > 0:
offset_y = top
new_height += top
if right > 0:
new_width += right
if bottom > 0:
new_height += bottom
new_engine = self.context.modules.engine.__class__(self.context)
new_engine.image = new_engine.gen_image((new_width, new_height), '#fff')
new_engine.enable_alpha()
new_engine.paste(self.engine, (offset_x, offset_y))
self.engine.image = new_engine.image | Pads the image with transparent pixels if necessary. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/filters/frame.py#L50-L76 | null | class Filter(BaseFilter):
regex = r'(?:frame\((?P<url>.*?))'
def on_image_ready(self, buffer):
self.nine_patch_engine.load(buffer, None)
self.nine_patch_engine.enable_alpha()
self.engine.enable_alpha()
nine_patch_mode, nine_patch_data = self.nine_patch_engine.image_data_as_rgb()
padding = _nine_patch.get_padding(nine_patch_mode,
nine_patch_data,
self.nine_patch_engine.size[0],
self.nine_patch_engine.size[1])
self.handle_padding(padding)
mode, data = self.engine.image_data_as_rgb()
if mode != nine_patch_mode:
raise RuntimeError('Image mode mismatch: %s != %s' % (
mode, nine_patch_mode)
)
imgdata = _nine_patch.apply(mode,
data,
self.engine.size[0],
self.engine.size[1],
nine_patch_data,
self.nine_patch_engine.size[0],
self.nine_patch_engine.size[1])
self.engine.set_image_data(imgdata)
self.callback()
def on_fetch_done(self, result):
# TODO if result.successful is False how can the error be handled?
if isinstance(result, LoaderResult):
buffer = result.buffer
else:
buffer = result
self.nine_patch_engine.load(buffer, None)
self.storage.put(self.url, self.nine_patch_engine.read())
self.storage.put_crypto(self.url)
self.on_image_ready(buffer)
@filter_method(BaseFilter.String, async=True)
@tornado.gen.coroutine
def frame(self, callback, url):
self.url = url
self.callback = callback
self.nine_patch_engine = self.context.modules.engine.__class__(self.context)
self.storage = self.context.modules.storage
buffer = yield tornado.gen.maybe_future(self.storage.get(self.url))
if buffer is not None:
self.on_image_ready(buffer)
else:
self.context.modules.loader.load(self.context, self.url, self.on_fetch_done)
|
thumbor/thumbor | thumbor/transformer.py | Transformer.get_target_dimensions | python | def get_target_dimensions(self):
if self.target_height is None:
self._calculate_target_dimensions()
return int(self.target_width), int(self.target_height) | Returns the target dimensions and calculates them if necessary.
The target dimensions are display independent.
:return: Target dimensions as a tuple (width, height)
:rtype: (int, int) | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/transformer.py#L58-L67 | [
"def _calculate_target_dimensions(self):\n source_width, source_height = self.engine.size\n source_width = float(source_width)\n source_height = float(source_height)\n\n if not self.context.request.width and not self.context.request.height:\n self.target_width = source_width\n self.target_... | class Transformer(object):
def __init__(self, context):
self.context = context
self.engine = self.context.request.engine
self.target_height = None
self.target_width = None
def _calculate_target_dimensions(self):
source_width, source_height = self.engine.size
source_width = float(source_width)
source_height = float(source_height)
if not self.context.request.width and not self.context.request.height:
self.target_width = source_width
self.target_height = source_height
else:
if self.context.request.width:
if self.context.request.width == "orig":
self.target_width = source_width
else:
self.target_width = float(self.context.request.width)
else:
self.target_width = self.engine.get_proportional_width(self.context.request.height)
if self.context.request.height:
if self.context.request.height == "orig":
self.target_height = source_height
else:
self.target_height = float(self.context.request.height)
else:
self.target_height = self.engine.get_proportional_height(self.context.request.width)
def adjust_focal_points(self):
source_width, source_height = self.engine.size
self.focal_points = None
if self.context.request.focal_points:
if self.context.request.should_crop:
self.focal_points = []
crop = self.context.request.crop
for point in self.context.request.focal_points:
if point.x < crop['left'] or point.x > crop['right'] or point.y < crop['top'] or point.y > crop['bottom']:
continue
point.x -= crop['left'] or 0
point.y -= crop['top'] or 0
self.focal_points.append(point)
else:
self.focal_points = self.context.request.focal_points
if not self.focal_points:
self.focal_points = [
FocalPoint.from_alignment(self.context.request.halign,
self.context.request.valign,
source_width,
source_height)
]
self.engine.focus(self.focal_points)
def transform(self, callback):
self.done_callback = callback
if self.context.config.RESPECT_ORIENTATION:
self.engine.reorientate()
self.trim()
self.smart_detect()
def trim(self):
is_gifsicle = (self.context.request.engine.extension == '.gif' and self.context.config.USE_GIFSICLE_ENGINE)
if self.context.request.trim is None or not trim_enabled or is_gifsicle:
return
mode, data = self.engine.image_data_as_rgb()
box = _bounding_box.apply(
mode,
self.engine.size[0],
self.engine.size[1],
self.context.request.trim_pos,
self.context.request.trim_tolerance,
data
)
if box[2] < box[0] or box[3] < box[1]:
logger.warn("Ignoring trim, there wouldn't be any image left, check the tolerance.")
return
self.engine.crop(box[0], box[1], box[2] + 1, box[3] + 1)
if self.context.request.should_crop:
self.context.request.crop['left'] -= box[0]
self.context.request.crop['top'] -= box[1]
self.context.request.crop['right'] -= box[0]
self.context.request.crop['bottom'] -= box[1]
@property
def smart_storage_key(self):
return self.context.request.image_url
@gen.coroutine
def smart_detect(self):
is_gifsicle = (self.context.request.engine.extension == '.gif' and self.context.config.USE_GIFSICLE_ENGINE)
if (not (self.context.modules.detectors and self.context.request.smart)) or is_gifsicle:
self.do_image_operations()
return
try:
# Beware! Boolean hell ahead.
#
# The `running_smart_detection` flag is needed so we can know
# whether `after_smart_detect()` is running synchronously or not.
#
# If we're running it in a sync fashion it will set
# `should_run_image_operations` to True so we can avoid running
# image operation inside the try block.
self.should_run_image_operations = False
self.running_smart_detection = True
yield self.do_smart_detection()
self.running_smart_detection = False
except Exception:
if not self.context.config.IGNORE_SMART_ERRORS:
raise
logger.exception("Ignored error during smart detection")
if self.context.config.USE_CUSTOM_ERROR_HANDLING:
self.context.modules.importer.error_handler.handle_error(
context=self.context,
handler=self.context.request_handler,
exception=sys.exc_info()
)
self.context.request.prevent_result_storage = True
self.context.request.detection_error = True
self.do_image_operations()
if self.should_run_image_operations:
self.do_image_operations()
@gen.coroutine
def do_smart_detection(self):
focal_points = yield gen.maybe_future(self.context.modules.storage.get_detector_data(self.smart_storage_key))
if focal_points is not None:
self.after_smart_detect(focal_points, points_from_storage=True)
else:
detectors = self.context.modules.detectors
detectors[0](self.context, index=0, detectors=detectors).detect(self.after_smart_detect)
def after_smart_detect(self, focal_points=[], points_from_storage=False):
for point in focal_points:
self.context.request.focal_points.append(FocalPoint.from_dict(point))
if self.context.request.focal_points and self.context.modules.storage and not points_from_storage:
storage = self.context.modules.storage
points = []
for point in self.context.request.focal_points:
points.append(point.to_dict())
storage.put_detector_data(self.smart_storage_key, points)
if self.running_smart_detection:
self.should_run_image_operations = True
return
self.do_image_operations()
def img_operation_worker(self):
if '.gif' == self.context.request.engine.extension and 'cover()' in self.context.request.filters:
self.extract_cover()
self.manual_crop()
self._calculate_target_dimensions()
self.adjust_focal_points()
if self.context.request.debug:
self.debug()
else:
if self.context.request.fit_in:
self.fit_in_resize()
else:
if not self.context.request.stretch:
self.auto_crop()
self.resize()
self.flip()
def do_image_operations(self):
"""
If ENGINE_THREADPOOL_SIZE > 0, this will schedule the image operations
into a threadpool. If not, it just executes them synchronously, and
calls self.done_callback when it's finished.
The actual work happens in self.img_operation_worker
"""
def inner(future):
self.done_callback()
self.context.thread_pool.queue(
operation=self.img_operation_worker,
callback=inner
)
def extract_cover(self):
self.engine.extract_cover()
def manual_crop(self):
if self.context.request.should_crop:
def limit(dimension, maximum):
return min(max(dimension, 0), maximum)
source_width, source_height = self.engine.size
crop = self.context.request.crop
crop['left'] = limit(crop['left'], source_width)
crop['top'] = limit(crop['top'], source_height)
crop['right'] = limit(crop['right'], source_width)
crop['bottom'] = limit(crop['bottom'], source_height)
if crop['left'] >= crop['right'] or crop['top'] >= crop['bottom']:
self.context.request.should_crop = False
crop['left'] = crop['right'] = crop['top'] = crop['bottom'] = 0
return
self.engine.crop(crop['left'], crop['top'], crop['right'], crop['bottom'])
def auto_crop(self):
source_width, source_height = self.engine.size
target_height = self.target_height or 1
target_width = self.target_width or 1
source_ratio = round(float(source_width) / source_height, 2)
target_ratio = round(float(target_width) / target_height, 2)
if source_ratio == target_ratio:
return
focal_x, focal_y = self.get_center_of_mass()
if self.target_width / source_width > self.target_height / source_height:
crop_width = source_width
crop_height = int(round(source_width * self.target_height / target_width, 0))
else:
crop_width = int(round(math.ceil(self.target_width * source_height / target_height), 0))
crop_height = source_height
crop_left = int(round(min(max(focal_x - (crop_width / 2), 0.0), source_width - crop_width)))
crop_right = min(crop_left + crop_width, source_width)
crop_top = int(round(min(max(focal_y - (crop_height / 2), 0.0), source_height - crop_height)))
crop_bottom = min(crop_top + crop_height, source_height)
self.engine.crop(crop_left, crop_top, crop_right, crop_bottom)
def flip(self):
if self.context.request.horizontal_flip:
self.engine.flip_horizontally()
if self.context.request.vertical_flip:
self.engine.flip_vertically()
def get_center_of_mass(self):
total_weight = 0.0
total_x = 0.0
total_y = 0.0
for focal_point in self.focal_points:
total_weight += focal_point.weight
total_x += focal_point.x * focal_point.weight
total_y += focal_point.y * focal_point.weight
x = total_x / total_weight
y = total_y / total_weight
return x, y
def resize(self):
source_width, source_height = self.engine.size
if self.target_width == source_width and self.target_height == source_height:
return
self.engine.resize(self.target_width or 1, self.target_height or 1) # avoiding 0px images
def fit_in_resize(self):
source_width, source_height = self.engine.size
# invert width and height if image orientation is not the same as request orientation and need adaptive
if self.context.request.adaptive and (
(source_width - source_height < 0 and self.target_width - self.target_height > 0) or
(source_width - source_height > 0 and self.target_width - self.target_height < 0)
):
tmp = self.context.request.width
self.context.request.width = self.context.request.height
self.context.request.height = tmp
tmp = self.target_width
self.target_width = self.target_height
self.target_height = tmp
sign = 1
if self.context.request.full:
sign = -1
if sign == 1 and self.target_width >= source_width and self.target_height >= source_height:
return
if source_width / self.target_width * sign >= source_height / self.target_height * sign:
resize_height = round(source_height * self.target_width / source_width)
resize_width = self.target_width
else:
resize_height = self.target_height
resize_width = round(source_width * self.target_height / source_height)
# ensure that filter should work on the real image size and not on the request
# size which might be smaller than the resized image in case `full-fit-in` is
# being used
requested_width = source_width if self.context.request.width == 'orig' else self.context.request.width
requested_height = source_height if self.context.request.height == 'orig' else self.context.request.height
self.context.request.width = int(max(requested_width, resize_width))
self.context.request.height = int(max(requested_height, resize_height))
self.engine.resize(resize_width, resize_height)
def debug(self):
if not self.context.request.focal_points:
return
for point in self.context.request.focal_points:
if point.width <= 1:
point.width = 10
if point.height <= 1:
point.height = 10
self.engine.draw_rectangle(int(point.x - (point.width / 2)),
int(point.y - (point.height / 2)),
point.width,
point.height)
|
thumbor/thumbor | thumbor/transformer.py | Transformer.do_image_operations | python | def do_image_operations(self):
def inner(future):
self.done_callback()
self.context.thread_pool.queue(
operation=self.img_operation_worker,
callback=inner
) | If ENGINE_THREADPOOL_SIZE > 0, this will schedule the image operations
into a threadpool. If not, it just executes them synchronously, and
calls self.done_callback when it's finished.
The actual work happens in self.img_operation_worker | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/transformer.py#L219-L233 | null | class Transformer(object):
def __init__(self, context):
self.context = context
self.engine = self.context.request.engine
self.target_height = None
self.target_width = None
def _calculate_target_dimensions(self):
source_width, source_height = self.engine.size
source_width = float(source_width)
source_height = float(source_height)
if not self.context.request.width and not self.context.request.height:
self.target_width = source_width
self.target_height = source_height
else:
if self.context.request.width:
if self.context.request.width == "orig":
self.target_width = source_width
else:
self.target_width = float(self.context.request.width)
else:
self.target_width = self.engine.get_proportional_width(self.context.request.height)
if self.context.request.height:
if self.context.request.height == "orig":
self.target_height = source_height
else:
self.target_height = float(self.context.request.height)
else:
self.target_height = self.engine.get_proportional_height(self.context.request.width)
def get_target_dimensions(self):
"""
Returns the target dimensions and calculates them if necessary.
The target dimensions are display independent.
:return: Target dimensions as a tuple (width, height)
:rtype: (int, int)
"""
if self.target_height is None:
self._calculate_target_dimensions()
return int(self.target_width), int(self.target_height)
def adjust_focal_points(self):
source_width, source_height = self.engine.size
self.focal_points = None
if self.context.request.focal_points:
if self.context.request.should_crop:
self.focal_points = []
crop = self.context.request.crop
for point in self.context.request.focal_points:
if point.x < crop['left'] or point.x > crop['right'] or point.y < crop['top'] or point.y > crop['bottom']:
continue
point.x -= crop['left'] or 0
point.y -= crop['top'] or 0
self.focal_points.append(point)
else:
self.focal_points = self.context.request.focal_points
if not self.focal_points:
self.focal_points = [
FocalPoint.from_alignment(self.context.request.halign,
self.context.request.valign,
source_width,
source_height)
]
self.engine.focus(self.focal_points)
def transform(self, callback):
self.done_callback = callback
if self.context.config.RESPECT_ORIENTATION:
self.engine.reorientate()
self.trim()
self.smart_detect()
def trim(self):
is_gifsicle = (self.context.request.engine.extension == '.gif' and self.context.config.USE_GIFSICLE_ENGINE)
if self.context.request.trim is None or not trim_enabled or is_gifsicle:
return
mode, data = self.engine.image_data_as_rgb()
box = _bounding_box.apply(
mode,
self.engine.size[0],
self.engine.size[1],
self.context.request.trim_pos,
self.context.request.trim_tolerance,
data
)
if box[2] < box[0] or box[3] < box[1]:
logger.warn("Ignoring trim, there wouldn't be any image left, check the tolerance.")
return
self.engine.crop(box[0], box[1], box[2] + 1, box[3] + 1)
if self.context.request.should_crop:
self.context.request.crop['left'] -= box[0]
self.context.request.crop['top'] -= box[1]
self.context.request.crop['right'] -= box[0]
self.context.request.crop['bottom'] -= box[1]
@property
def smart_storage_key(self):
return self.context.request.image_url
@gen.coroutine
def smart_detect(self):
is_gifsicle = (self.context.request.engine.extension == '.gif' and self.context.config.USE_GIFSICLE_ENGINE)
if (not (self.context.modules.detectors and self.context.request.smart)) or is_gifsicle:
self.do_image_operations()
return
try:
# Beware! Boolean hell ahead.
#
# The `running_smart_detection` flag is needed so we can know
# whether `after_smart_detect()` is running synchronously or not.
#
# If we're running it in a sync fashion it will set
# `should_run_image_operations` to True so we can avoid running
# image operation inside the try block.
self.should_run_image_operations = False
self.running_smart_detection = True
yield self.do_smart_detection()
self.running_smart_detection = False
except Exception:
if not self.context.config.IGNORE_SMART_ERRORS:
raise
logger.exception("Ignored error during smart detection")
if self.context.config.USE_CUSTOM_ERROR_HANDLING:
self.context.modules.importer.error_handler.handle_error(
context=self.context,
handler=self.context.request_handler,
exception=sys.exc_info()
)
self.context.request.prevent_result_storage = True
self.context.request.detection_error = True
self.do_image_operations()
if self.should_run_image_operations:
self.do_image_operations()
@gen.coroutine
def do_smart_detection(self):
focal_points = yield gen.maybe_future(self.context.modules.storage.get_detector_data(self.smart_storage_key))
if focal_points is not None:
self.after_smart_detect(focal_points, points_from_storage=True)
else:
detectors = self.context.modules.detectors
detectors[0](self.context, index=0, detectors=detectors).detect(self.after_smart_detect)
def after_smart_detect(self, focal_points=[], points_from_storage=False):
for point in focal_points:
self.context.request.focal_points.append(FocalPoint.from_dict(point))
if self.context.request.focal_points and self.context.modules.storage and not points_from_storage:
storage = self.context.modules.storage
points = []
for point in self.context.request.focal_points:
points.append(point.to_dict())
storage.put_detector_data(self.smart_storage_key, points)
if self.running_smart_detection:
self.should_run_image_operations = True
return
self.do_image_operations()
def img_operation_worker(self):
if '.gif' == self.context.request.engine.extension and 'cover()' in self.context.request.filters:
self.extract_cover()
self.manual_crop()
self._calculate_target_dimensions()
self.adjust_focal_points()
if self.context.request.debug:
self.debug()
else:
if self.context.request.fit_in:
self.fit_in_resize()
else:
if not self.context.request.stretch:
self.auto_crop()
self.resize()
self.flip()
def extract_cover(self):
self.engine.extract_cover()
def manual_crop(self):
if self.context.request.should_crop:
def limit(dimension, maximum):
return min(max(dimension, 0), maximum)
source_width, source_height = self.engine.size
crop = self.context.request.crop
crop['left'] = limit(crop['left'], source_width)
crop['top'] = limit(crop['top'], source_height)
crop['right'] = limit(crop['right'], source_width)
crop['bottom'] = limit(crop['bottom'], source_height)
if crop['left'] >= crop['right'] or crop['top'] >= crop['bottom']:
self.context.request.should_crop = False
crop['left'] = crop['right'] = crop['top'] = crop['bottom'] = 0
return
self.engine.crop(crop['left'], crop['top'], crop['right'], crop['bottom'])
def auto_crop(self):
source_width, source_height = self.engine.size
target_height = self.target_height or 1
target_width = self.target_width or 1
source_ratio = round(float(source_width) / source_height, 2)
target_ratio = round(float(target_width) / target_height, 2)
if source_ratio == target_ratio:
return
focal_x, focal_y = self.get_center_of_mass()
if self.target_width / source_width > self.target_height / source_height:
crop_width = source_width
crop_height = int(round(source_width * self.target_height / target_width, 0))
else:
crop_width = int(round(math.ceil(self.target_width * source_height / target_height), 0))
crop_height = source_height
crop_left = int(round(min(max(focal_x - (crop_width / 2), 0.0), source_width - crop_width)))
crop_right = min(crop_left + crop_width, source_width)
crop_top = int(round(min(max(focal_y - (crop_height / 2), 0.0), source_height - crop_height)))
crop_bottom = min(crop_top + crop_height, source_height)
self.engine.crop(crop_left, crop_top, crop_right, crop_bottom)
def flip(self):
if self.context.request.horizontal_flip:
self.engine.flip_horizontally()
if self.context.request.vertical_flip:
self.engine.flip_vertically()
def get_center_of_mass(self):
total_weight = 0.0
total_x = 0.0
total_y = 0.0
for focal_point in self.focal_points:
total_weight += focal_point.weight
total_x += focal_point.x * focal_point.weight
total_y += focal_point.y * focal_point.weight
x = total_x / total_weight
y = total_y / total_weight
return x, y
def resize(self):
source_width, source_height = self.engine.size
if self.target_width == source_width and self.target_height == source_height:
return
self.engine.resize(self.target_width or 1, self.target_height or 1) # avoiding 0px images
def fit_in_resize(self):
source_width, source_height = self.engine.size
# invert width and height if image orientation is not the same as request orientation and need adaptive
if self.context.request.adaptive and (
(source_width - source_height < 0 and self.target_width - self.target_height > 0) or
(source_width - source_height > 0 and self.target_width - self.target_height < 0)
):
tmp = self.context.request.width
self.context.request.width = self.context.request.height
self.context.request.height = tmp
tmp = self.target_width
self.target_width = self.target_height
self.target_height = tmp
sign = 1
if self.context.request.full:
sign = -1
if sign == 1 and self.target_width >= source_width and self.target_height >= source_height:
return
if source_width / self.target_width * sign >= source_height / self.target_height * sign:
resize_height = round(source_height * self.target_width / source_width)
resize_width = self.target_width
else:
resize_height = self.target_height
resize_width = round(source_width * self.target_height / source_height)
# ensure that filter should work on the real image size and not on the request
# size which might be smaller than the resized image in case `full-fit-in` is
# being used
requested_width = source_width if self.context.request.width == 'orig' else self.context.request.width
requested_height = source_height if self.context.request.height == 'orig' else self.context.request.height
self.context.request.width = int(max(requested_width, resize_width))
self.context.request.height = int(max(requested_height, resize_height))
self.engine.resize(resize_width, resize_height)
def debug(self):
if not self.context.request.focal_points:
return
for point in self.context.request.focal_points:
if point.width <= 1:
point.width = 10
if point.height <= 1:
point.height = 10
self.engine.draw_rectangle(int(point.x - (point.width / 2)),
int(point.y - (point.height / 2)),
point.width,
point.height)
|
thumbor/thumbor | thumbor/engines/__init__.py | BaseEngine.get_orientation | python | def get_orientation(self):
exif_dict = self._get_exif_segment()
if exif_dict and piexif.ImageIFD.Orientation in exif_dict["0th"]:
return exif_dict["0th"][piexif.ImageIFD.Orientation]
return None | Returns the image orientation of the buffer image or None
if it is undefined. Gets the original value from the Exif tag.
If the buffer has been rotated, then the value is adjusted to 1.
:return: Orientation value (1 - 8)
:rtype: int or None | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/__init__.py#L246-L257 | [
"def _get_exif_segment(self):\n if (not hasattr(self, 'exif')) or self.exif is None:\n return None\n\n try:\n exif_dict = piexif.load(self.exif)\n except Exception:\n logger.exception('Ignored error handling exif for reorientation')\n else:\n return exif_dict\n return None... | class BaseEngine(object):
def __init__(self, context):
self.context = context
self.image = None
self.extension = None
self.source_width = None
self.source_height = None
self.icc_profile = None
self.frame_count = 1
self.metadata = None
@classmethod
def get_mimetype(cls, buffer):
if buffer.startswith('GIF8'):
return 'image/gif'
elif buffer.startswith('\x89PNG\r\n\x1a\n'):
return 'image/png'
elif buffer.startswith('\xff\xd8'):
return 'image/jpeg'
elif buffer.startswith('WEBP', 8):
return 'image/webp'
elif buffer.startswith('\x00\x00\x00\x0c'):
return 'image/jp2'
elif buffer.startswith('\x00\x00\x00 ftyp'):
return 'video/mp4'
elif buffer.startswith('\x1aE\xdf\xa3'):
return 'video/webm'
elif buffer.startswith('\x49\x49\x2A\x00') or buffer.startswith('\x4D\x4D\x00\x2A'):
return 'image/tiff'
elif SVG_RE.search(buffer[:2048].replace(b'\0', '')):
return 'image/svg+xml'
def wrap(self, multiple_engine):
for method_name in ['resize', 'crop', 'flip_vertically',
'flip_horizontally']:
setattr(self, method_name, multiple_engine.do_many(method_name))
setattr(self, 'read', multiple_engine.read)
def is_multiple(self):
return hasattr(self, 'multiple_engine') and self.multiple_engine is not None
def frame_engines(self):
return self.multiple_engine.frame_engines
def convert_svg_to_png(self, buffer):
if not cairosvg:
msg = """[BaseEngine] convert_svg_to_png failed cairosvg not
imported (if you want svg conversion to png please install cairosvg)
"""
logger.error(msg)
return buffer
try:
buffer = cairosvg.svg2png(bytestring=buffer, dpi=self.context.config.SVG_DPI)
mime = self.get_mimetype(buffer)
self.extension = EXTENSION.get(mime, '.jpg')
except ParseError:
mime = self.get_mimetype(buffer)
extension = EXTENSION.get(mime)
if extension is None or extension == '.svg':
raise
self.extension = extension
return buffer
def load(self, buffer, extension):
self.extension = extension
if extension is None:
mime = self.get_mimetype(buffer)
self.extension = EXTENSION.get(mime, '.jpg')
if self.extension == '.svg':
buffer = self.convert_svg_to_png(buffer)
image_or_frames = self.create_image(buffer)
if image_or_frames is None:
return
if METADATA_AVAILABLE:
try:
self.metadata = ImageMetadata.from_buffer(buffer)
self.metadata.read()
except Exception as e:
logger.error('Error reading image metadata: %s' % e)
if self.context.config.ALLOW_ANIMATED_GIFS and isinstance(
image_or_frames, (list, tuple)):
self.image = image_or_frames[0]
if len(image_or_frames) > 1:
self.multiple_engine = MultipleEngine(self)
for frame in image_or_frames:
self.multiple_engine.add_frame(frame)
self.wrap(self.multiple_engine)
else:
self.image = image_or_frames
if self.source_width is None:
self.source_width = self.size[0]
if self.source_height is None:
self.source_height = self.size[1]
@property
def size(self):
if self.is_multiple():
return self.multiple_engine.size()
return self.image.size
def can_convert_to_webp(self):
return self.size[0] <= WEBP_SIDE_LIMIT and self.size[1] <= WEBP_SIDE_LIMIT
def normalize(self):
width, height = self.size
self.source_width = width
self.source_height = height
if width > self.context.config.MAX_WIDTH \
or height > self.context.config.MAX_HEIGHT:
width_diff = width - self.context.config.MAX_WIDTH
height_diff = height - self.context.config.MAX_HEIGHT
if self.context.config.MAX_WIDTH and width_diff > height_diff:
height = self.get_proportional_height(
self.context.config.MAX_WIDTH
)
self.resize(self.context.config.MAX_WIDTH, height)
return True
elif self.context.config.MAX_HEIGHT and height_diff > width_diff:
width = self.get_proportional_width(
self.context.config.MAX_HEIGHT
)
self.resize(width, self.context.config.MAX_HEIGHT)
return True
return False
def get_proportional_width(self, new_height):
width, height = self.size
return round(float(new_height) * width / height, 0)
def get_proportional_height(self, new_width):
width, height = self.size
return round(float(new_width) * height / width, 0)
def _get_exif_segment(self):
if (not hasattr(self, 'exif')) or self.exif is None:
return None
try:
exif_dict = piexif.load(self.exif)
except Exception:
logger.exception('Ignored error handling exif for reorientation')
else:
return exif_dict
return None
def reorientate(self, override_exif=True):
"""
Rotates the image in the buffer so that it is oriented correctly.
If override_exif is True (default) then the metadata
orientation is adjusted as well.
:param override_exif: If the metadata should be adjusted as well.
:type override_exif: Boolean
"""
orientation = self.get_orientation()
if orientation is None:
return
if orientation == 2:
self.flip_horizontally()
elif orientation == 3:
self.rotate(180)
elif orientation == 4:
self.flip_vertically()
elif orientation == 5:
# Horizontal Mirror + Rotation 270 CCW
self.flip_vertically()
self.rotate(270)
elif orientation == 6:
self.rotate(270)
elif orientation == 7:
# Vertical Mirror + Rotation 270 CCW
self.flip_horizontally()
self.rotate(270)
elif orientation == 8:
self.rotate(90)
if orientation != 1 and override_exif:
exif_dict = self._get_exif_segment()
if exif_dict and piexif.ImageIFD.Orientation in exif_dict["0th"]:
exif_dict["0th"][piexif.ImageIFD.Orientation] = 1
try:
self.exif = piexif.dump(exif_dict)
except Exception as e:
msg = """[piexif] %s""" % e
logger.error(msg)
def gen_image(self, size, color):
raise NotImplementedError()
def create_image(self, buffer):
raise NotImplementedError()
def crop(self, left, top, right, bottom):
raise NotImplementedError()
def resize(self, width, height):
raise NotImplementedError()
def focus(self, points):
pass
def flip_horizontally(self):
raise NotImplementedError()
def flip_vertically(self):
raise NotImplementedError()
def rotate(self, degrees):
"""
Rotates the image the given amount CCW.
:param degrees: Amount to rotate in degrees.
:type amount: int
"""
pass
def read_multiple(self, images, extension=None):
raise NotImplementedError()
def read(self, extension, quality):
raise NotImplementedError()
def get_image_data(self):
raise NotImplementedError()
def set_image_data(self, data):
raise NotImplementedError()
def get_image_mode(self):
""" Possible return values should be: RGB, RBG, GRB, GBR,
BRG, BGR, RGBA, AGBR, ... """
raise NotImplementedError()
def paste(self, other_engine, pos, merge=True):
raise NotImplementedError()
def enable_alpha(self):
raise NotImplementedError()
def image_data_as_rgb(self, update_image=True):
raise NotImplementedError()
def strip_exif(self):
pass
def convert_to_grayscale(self, update_image=True, alpha=True):
raise NotImplementedError()
def draw_rectangle(self, x, y, width, height):
raise NotImplementedError()
def strip_icc(self):
pass
def extract_cover(self):
raise NotImplementedError()
def has_transparency(self):
raise NotImplementedError()
def cleanup(self):
pass
def can_auto_convert_png_to_jpg(self):
can_convert = (self.extension == '.png' and not self.has_transparency())
return can_convert
|
thumbor/thumbor | thumbor/engines/__init__.py | BaseEngine.reorientate | python | def reorientate(self, override_exif=True):
orientation = self.get_orientation()
if orientation is None:
return
if orientation == 2:
self.flip_horizontally()
elif orientation == 3:
self.rotate(180)
elif orientation == 4:
self.flip_vertically()
elif orientation == 5:
# Horizontal Mirror + Rotation 270 CCW
self.flip_vertically()
self.rotate(270)
elif orientation == 6:
self.rotate(270)
elif orientation == 7:
# Vertical Mirror + Rotation 270 CCW
self.flip_horizontally()
self.rotate(270)
elif orientation == 8:
self.rotate(90)
if orientation != 1 and override_exif:
exif_dict = self._get_exif_segment()
if exif_dict and piexif.ImageIFD.Orientation in exif_dict["0th"]:
exif_dict["0th"][piexif.ImageIFD.Orientation] = 1
try:
self.exif = piexif.dump(exif_dict)
except Exception as e:
msg = """[piexif] %s""" % e
logger.error(msg) | Rotates the image in the buffer so that it is oriented correctly.
If override_exif is True (default) then the metadata
orientation is adjusted as well.
:param override_exif: If the metadata should be adjusted as well.
:type override_exif: Boolean | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/__init__.py#L259-L299 | [
"def _get_exif_segment(self):\n if (not hasattr(self, 'exif')) or self.exif is None:\n return None\n\n try:\n exif_dict = piexif.load(self.exif)\n except Exception:\n logger.exception('Ignored error handling exif for reorientation')\n else:\n return exif_dict\n return None... | class BaseEngine(object):
def __init__(self, context):
self.context = context
self.image = None
self.extension = None
self.source_width = None
self.source_height = None
self.icc_profile = None
self.frame_count = 1
self.metadata = None
@classmethod
def get_mimetype(cls, buffer):
if buffer.startswith('GIF8'):
return 'image/gif'
elif buffer.startswith('\x89PNG\r\n\x1a\n'):
return 'image/png'
elif buffer.startswith('\xff\xd8'):
return 'image/jpeg'
elif buffer.startswith('WEBP', 8):
return 'image/webp'
elif buffer.startswith('\x00\x00\x00\x0c'):
return 'image/jp2'
elif buffer.startswith('\x00\x00\x00 ftyp'):
return 'video/mp4'
elif buffer.startswith('\x1aE\xdf\xa3'):
return 'video/webm'
elif buffer.startswith('\x49\x49\x2A\x00') or buffer.startswith('\x4D\x4D\x00\x2A'):
return 'image/tiff'
elif SVG_RE.search(buffer[:2048].replace(b'\0', '')):
return 'image/svg+xml'
def wrap(self, multiple_engine):
for method_name in ['resize', 'crop', 'flip_vertically',
'flip_horizontally']:
setattr(self, method_name, multiple_engine.do_many(method_name))
setattr(self, 'read', multiple_engine.read)
def is_multiple(self):
return hasattr(self, 'multiple_engine') and self.multiple_engine is not None
def frame_engines(self):
return self.multiple_engine.frame_engines
def convert_svg_to_png(self, buffer):
if not cairosvg:
msg = """[BaseEngine] convert_svg_to_png failed cairosvg not
imported (if you want svg conversion to png please install cairosvg)
"""
logger.error(msg)
return buffer
try:
buffer = cairosvg.svg2png(bytestring=buffer, dpi=self.context.config.SVG_DPI)
mime = self.get_mimetype(buffer)
self.extension = EXTENSION.get(mime, '.jpg')
except ParseError:
mime = self.get_mimetype(buffer)
extension = EXTENSION.get(mime)
if extension is None or extension == '.svg':
raise
self.extension = extension
return buffer
def load(self, buffer, extension):
self.extension = extension
if extension is None:
mime = self.get_mimetype(buffer)
self.extension = EXTENSION.get(mime, '.jpg')
if self.extension == '.svg':
buffer = self.convert_svg_to_png(buffer)
image_or_frames = self.create_image(buffer)
if image_or_frames is None:
return
if METADATA_AVAILABLE:
try:
self.metadata = ImageMetadata.from_buffer(buffer)
self.metadata.read()
except Exception as e:
logger.error('Error reading image metadata: %s' % e)
if self.context.config.ALLOW_ANIMATED_GIFS and isinstance(
image_or_frames, (list, tuple)):
self.image = image_or_frames[0]
if len(image_or_frames) > 1:
self.multiple_engine = MultipleEngine(self)
for frame in image_or_frames:
self.multiple_engine.add_frame(frame)
self.wrap(self.multiple_engine)
else:
self.image = image_or_frames
if self.source_width is None:
self.source_width = self.size[0]
if self.source_height is None:
self.source_height = self.size[1]
@property
def size(self):
if self.is_multiple():
return self.multiple_engine.size()
return self.image.size
def can_convert_to_webp(self):
return self.size[0] <= WEBP_SIDE_LIMIT and self.size[1] <= WEBP_SIDE_LIMIT
def normalize(self):
width, height = self.size
self.source_width = width
self.source_height = height
if width > self.context.config.MAX_WIDTH \
or height > self.context.config.MAX_HEIGHT:
width_diff = width - self.context.config.MAX_WIDTH
height_diff = height - self.context.config.MAX_HEIGHT
if self.context.config.MAX_WIDTH and width_diff > height_diff:
height = self.get_proportional_height(
self.context.config.MAX_WIDTH
)
self.resize(self.context.config.MAX_WIDTH, height)
return True
elif self.context.config.MAX_HEIGHT and height_diff > width_diff:
width = self.get_proportional_width(
self.context.config.MAX_HEIGHT
)
self.resize(width, self.context.config.MAX_HEIGHT)
return True
return False
def get_proportional_width(self, new_height):
width, height = self.size
return round(float(new_height) * width / height, 0)
def get_proportional_height(self, new_width):
width, height = self.size
return round(float(new_width) * height / width, 0)
def _get_exif_segment(self):
if (not hasattr(self, 'exif')) or self.exif is None:
return None
try:
exif_dict = piexif.load(self.exif)
except Exception:
logger.exception('Ignored error handling exif for reorientation')
else:
return exif_dict
return None
def get_orientation(self):
"""
Returns the image orientation of the buffer image or None
if it is undefined. Gets the original value from the Exif tag.
If the buffer has been rotated, then the value is adjusted to 1.
:return: Orientation value (1 - 8)
:rtype: int or None
"""
exif_dict = self._get_exif_segment()
if exif_dict and piexif.ImageIFD.Orientation in exif_dict["0th"]:
return exif_dict["0th"][piexif.ImageIFD.Orientation]
return None
def gen_image(self, size, color):
raise NotImplementedError()
def create_image(self, buffer):
raise NotImplementedError()
def crop(self, left, top, right, bottom):
raise NotImplementedError()
def resize(self, width, height):
raise NotImplementedError()
def focus(self, points):
pass
def flip_horizontally(self):
raise NotImplementedError()
def flip_vertically(self):
raise NotImplementedError()
def rotate(self, degrees):
"""
Rotates the image the given amount CCW.
:param degrees: Amount to rotate in degrees.
:type amount: int
"""
pass
def read_multiple(self, images, extension=None):
raise NotImplementedError()
def read(self, extension, quality):
raise NotImplementedError()
def get_image_data(self):
raise NotImplementedError()
def set_image_data(self, data):
raise NotImplementedError()
def get_image_mode(self):
""" Possible return values should be: RGB, RBG, GRB, GBR,
BRG, BGR, RGBA, AGBR, ... """
raise NotImplementedError()
def paste(self, other_engine, pos, merge=True):
raise NotImplementedError()
def enable_alpha(self):
raise NotImplementedError()
def image_data_as_rgb(self, update_image=True):
raise NotImplementedError()
def strip_exif(self):
pass
def convert_to_grayscale(self, update_image=True, alpha=True):
raise NotImplementedError()
def draw_rectangle(self, x, y, width, height):
raise NotImplementedError()
def strip_icc(self):
pass
def extract_cover(self):
raise NotImplementedError()
def has_transparency(self):
raise NotImplementedError()
def cleanup(self):
pass
def can_auto_convert_png_to_jpg(self):
can_convert = (self.extension == '.png' and not self.has_transparency())
return can_convert
|
thumbor/thumbor | thumbor/handlers/__init__.py | BaseHandler.get_image | python | def get_image(self):
try:
result = yield self._fetch(
self.context.request.image_url
)
if not result.successful:
if result.loader_error == LoaderResult.ERROR_NOT_FOUND:
self._error(404)
return
elif result.loader_error == LoaderResult.ERROR_UPSTREAM:
# Return a Bad Gateway status if the error came from upstream
self._error(502)
return
elif result.loader_error == LoaderResult.ERROR_TIMEOUT:
# Return a Gateway Timeout status if upstream timed out (i.e. 599)
self._error(504)
return
elif isinstance(result.loader_error, int):
self._error(result.loader_error)
return
elif hasattr(result, 'engine_error') and result.engine_error == EngineResult.COULD_NOT_LOAD_IMAGE:
self._error(400)
return
else:
self._error(500)
return
except Exception as e:
msg = '[BaseHandler] get_image failed for url `{url}`. error: `{error}`'.format(
url=self.context.request.image_url,
error=e
)
self.log_exception(*sys.exc_info())
if 'cannot identify image file' in e.message:
logger.warning(msg)
self._error(400)
else:
logger.error(msg)
self._error(500)
return
normalized = result.normalized
buffer = result.buffer
engine = result.engine
req = self.context.request
if engine is None:
if buffer is None:
self._error(504)
return
engine = self.context.request.engine
try:
engine.load(buffer, self.context.request.extension)
except Exception:
self._error(504)
return
self.context.transformer = Transformer(self.context)
def transform():
self.normalize_crops(normalized, req, engine)
if req.meta:
self.context.transformer.engine = \
self.context.request.engine = \
JSONEngine(engine, req.image_url, req.meta_callback)
self.context.transformer.transform(self.after_transform)
self.filters_runner.apply_filters(thumbor.filters.PHASE_AFTER_LOAD, transform) | This function is called after the PRE_LOAD filters have been applied.
It applies the AFTER_LOAD filters on the result, then crops the image. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/handlers/__init__.py#L146-L224 | [
"def _error(self, status, msg=None):\n self.set_status(status)\n if msg is not None:\n logger.warn(msg)\n self.finish()\n"
] | class BaseHandler(tornado.web.RequestHandler):
url_locks = {}
def prepare(self, *args, **kwargs):
super(BaseHandler, self).prepare(*args, **kwargs)
if not hasattr(self, 'context'):
return
self._response_ext = None
self._response_length = None
self._response_start = datetime.datetime.now()
self.context.metrics.incr('response.count')
def on_finish(self, *args, **kwargs):
super(BaseHandler, self).on_finish(*args, **kwargs)
if not hasattr(self, 'context'):
return
total_time = (datetime.datetime.now() - self._response_start).total_seconds() * 1000
status = self.get_status()
self.context.metrics.timing('response.time', total_time)
self.context.metrics.timing('response.time.{0}'.format(status), total_time)
self.context.metrics.incr('response.status.{0}'.format(status))
if self._response_ext is not None:
ext = self._response_ext
self.context.metrics.incr('response.format{0}'.format(ext))
self.context.metrics.timing('response.time{0}'.format(ext), total_time)
if self._response_length is not None:
self.context.metrics.incr('response.bytes{0}'.format(ext), self._response_length)
self.context.request_handler = None
if hasattr(self.context, 'request'):
self.context.request.engine = None
self.context.modules = None
self.context.filters_factory = None
self.context.metrics = None
self.context.thread_pool = None
self.context.transformer = None
self.context = None
def _error(self, status, msg=None):
self.set_status(status)
if msg is not None:
logger.warn(msg)
self.finish()
@gen.coroutine
def execute_image_operations(self):
self.context.request.quality = None
req = self.context.request
conf = self.context.config
should_store = self.context.config.RESULT_STORAGE_STORES_UNSAFE or not self.context.request.unsafe
if self.context.modules.result_storage and should_store:
start = datetime.datetime.now()
try:
result = yield gen.maybe_future(self.context.modules.result_storage.get())
except Exception as e:
logger.exception('[BaseHander.execute_image_operations] %s', e)
self._error(500, 'Error while trying to get the image from the result storage: {}'.format(e))
return
finish = datetime.datetime.now()
self.context.metrics.timing('result_storage.incoming_time', (finish - start).total_seconds() * 1000)
if result is None:
self.context.metrics.incr('result_storage.miss')
else:
self.context.metrics.incr('result_storage.hit')
self.context.metrics.incr('result_storage.bytes_read', len(result))
logger.debug('[RESULT_STORAGE] IMAGE FOUND: %s' % req.url)
self.finish_request(result)
return
if conf.MAX_WIDTH and (not isinstance(req.width, basestring)) and req.width > conf.MAX_WIDTH:
req.width = conf.MAX_WIDTH
if conf.MAX_HEIGHT and (not isinstance(req.height, basestring)) and req.height > conf.MAX_HEIGHT:
req.height = conf.MAX_HEIGHT
req.meta_callback = conf.META_CALLBACK_NAME or self.request.arguments.get('callback', [None])[0]
self.filters_runner = self.context.filters_factory.create_instances(self.context, self.context.request.filters)
# Apply all the filters from the PRE_LOAD phase and call get_image() afterwards.
self.filters_runner.apply_filters(thumbor.filters.PHASE_PRE_LOAD, self.get_image)
@gen.coroutine # NOQA
def normalize_crops(self, normalized, req, engine):
new_crops = None
if normalized and req.should_crop:
crop_left = req.crop['left']
crop_top = req.crop['top']
crop_right = req.crop['right']
crop_bottom = req.crop['bottom']
actual_width, actual_height = engine.size
if not req.width and not req.height:
actual_width = engine.size[0]
actual_height = engine.size[1]
elif req.width:
actual_height = engine.get_proportional_height(engine.size[0])
elif req.height:
actual_width = engine.get_proportional_width(engine.size[1])
new_crops = self.translate_crop_coordinates(
engine.source_width,
engine.source_height,
actual_width,
actual_height,
crop_left,
crop_top,
crop_right,
crop_bottom
)
req.crop['left'] = new_crops[0]
req.crop['top'] = new_crops[1]
req.crop['right'] = new_crops[2]
req.crop['bottom'] = new_crops[3]
def after_transform(self):
if self.context.request.extension == '.gif' and self.context.config.USE_GIFSICLE_ENGINE:
self.finish_request()
else:
self.filters_runner.apply_filters(thumbor.filters.PHASE_POST_TRANSFORM, self.finish_request)
def is_webp(self, context):
return (context.config.AUTO_WEBP and
context.request.accepts_webp and
not context.request.engine.is_multiple() and
context.request.engine.can_convert_to_webp())
def is_animated_gif(self, data):
if data[:6] not in [b"GIF87a", b"GIF89a"]:
return False
i = 10 # skip header
frames = 0
def skip_color_table(i, flags):
if flags & 0x80:
i += 3 << ((flags & 7) + 1)
return i
flags = ord(data[i])
i = skip_color_table(i + 3, flags)
while frames < 2:
block = data[i]
i += 1
if block == b'\x3B':
break
if block == b'\x21':
i += 1
elif block == b'\x2C':
frames += 1
i += 8
i = skip_color_table(i + 1, ord(data[i]))
i += 1
else:
return False
while True:
j = ord(data[i])
i += 1
if not j:
break
i += j
return frames > 1
def can_auto_convert_png_to_jpg(self):
request_override = self.context.request.auto_png_to_jpg
config = self.context.config
enabled = (config.AUTO_PNG_TO_JPG and request_override is None) or request_override
if enabled:
return self.context.request.engine.can_auto_convert_png_to_jpg()
return False
def define_image_type(self, context, result):
if result is not None:
if isinstance(result, ResultStorageResult):
buffer = result.buffer
else:
buffer = result
image_extension = EXTENSION.get(BaseEngine.get_mimetype(buffer), '.jpg')
else:
image_extension = context.request.format
if image_extension is not None:
image_extension = '.%s' % image_extension
logger.debug('Image format specified as %s.' % image_extension)
elif self.is_webp(context):
image_extension = '.webp'
logger.debug('Image format set by AUTO_WEBP as %s.' % image_extension)
elif self.can_auto_convert_png_to_jpg():
image_extension = '.jpg'
logger.debug('Image format set by AUTO_PNG_TO_JPG as %s.' % image_extension)
else:
image_extension = context.request.engine.extension
logger.debug('No image format specified. Retrieving from the image extension: %s.' % image_extension)
content_type = CONTENT_TYPE.get(image_extension, CONTENT_TYPE['.jpg'])
if context.request.meta:
context.request.meta_callback = context.config.META_CALLBACK_NAME or self.request.arguments.get('callback', [None])[0]
content_type = 'text/javascript' if context.request.meta_callback else 'application/json'
logger.debug('Metadata requested. Serving content type of %s.' % content_type)
logger.debug('Content Type of %s detected.' % content_type)
return (image_extension, content_type)
def _load_results(self, context):
image_extension, content_type = self.define_image_type(context, None)
quality = self.context.request.quality
if quality is None:
if image_extension == '.webp' and self.context.config.WEBP_QUALITY is not None:
quality = self.context.config.get('WEBP_QUALITY')
else:
quality = self.context.config.QUALITY
results = context.request.engine.read(image_extension, quality)
if context.request.max_bytes is not None:
results = self.reload_to_fit_in_kb(
context.request.engine,
results,
image_extension,
quality,
context.request.max_bytes
)
if not context.request.meta:
results = self.optimize(context, image_extension, results)
# An optimizer might have modified the image format.
content_type = BaseEngine.get_mimetype(results)
return results, content_type
@gen.coroutine
def _process_result_from_storage(self, result):
if self.context.config.SEND_IF_MODIFIED_LAST_MODIFIED_HEADERS:
# Handle If-Modified-Since & Last-Modified header
try:
if isinstance(result, ResultStorageResult):
result_last_modified = result.last_modified
else:
result_last_modified = yield gen.maybe_future(self.context.modules.result_storage.last_updated())
if result_last_modified:
if 'If-Modified-Since' in self.request.headers:
date_modified_since = datetime.datetime.strptime(
self.request.headers['If-Modified-Since'], HTTP_DATE_FMT
).replace(tzinfo=pytz.utc)
if result_last_modified <= date_modified_since:
self.set_status(304)
self.finish()
return
self.set_header('Last-Modified', result_last_modified.strftime(HTTP_DATE_FMT))
except NotImplementedError:
logger.warn('last_updated method is not supported by your result storage service, hence If-Modified-Since & '
'Last-Updated headers support is disabled.')
@gen.coroutine
def finish_request(self, result_from_storage=None):
if result_from_storage is not None:
self._process_result_from_storage(result_from_storage)
image_extension, content_type = self.define_image_type(self.context, result_from_storage)
self._write_results_to_client(result_from_storage, content_type)
return
context = self.context
result_storage = context.modules.result_storage
metrics = context.metrics
should_store = result_storage and not context.request.prevent_result_storage \
and (context.config.RESULT_STORAGE_STORES_UNSAFE or not context.request.unsafe)
def inner(future):
try:
future_result = future.result()
except Exception as e:
logger.exception('[BaseHander.finish_request] %s', e)
self._error(500, 'Error while trying to fetch the image: {}'.format(e))
return
results, content_type = future_result
self._write_results_to_client(results, content_type)
if should_store:
tornado.ioloop.IOLoop.instance().add_callback(self._store_results, result_storage, metrics, results)
self.context.thread_pool.queue(
operation=functools.partial(self._load_results, context),
callback=inner,
)
def _write_results_to_client(self, results, content_type):
max_age = self.context.config.MAX_AGE
if self.context.request.max_age is not None:
max_age = self.context.request.max_age
if self.context.request.prevent_result_storage or self.context.request.detection_error:
max_age = self.context.config.MAX_AGE_TEMP_IMAGE
if max_age:
self.set_header('Cache-Control', 'max-age=' + str(max_age) + ',public')
self.set_header('Expires', datetime.datetime.utcnow() + datetime.timedelta(seconds=max_age))
self.set_header('Server', 'Thumbor/%s' % __version__)
self.set_header('Content-Type', content_type)
if isinstance(results, ResultStorageResult):
buffer = results.buffer
else:
buffer = results
# auto-convert configured?
should_vary = self.context.config.AUTO_WEBP
# we have image (not video)
should_vary = should_vary and content_type.startswith("image/")
# output format is not requested via format filter
should_vary = should_vary and not (
self.context.request.format and # format is supported by filter
bool(re.search(r"format\([^)]+\)", self.context.request.filters)) # filter is in request
)
# our image is not animated gif
should_vary = should_vary and not self.is_animated_gif(buffer)
if should_vary:
self.set_header('Vary', 'Accept')
self.context.headers = self._headers.copy()
self._response_ext = EXTENSION.get(content_type)
self._response_length = len(buffer)
self.write(buffer)
self.finish()
@gen.coroutine
def _store_results(self, result_storage, metrics, results):
start = datetime.datetime.now()
yield gen.maybe_future(result_storage.put(results))
finish = datetime.datetime.now()
metrics.incr('result_storage.bytes_written', len(results))
metrics.timing('result_storage.outgoing_time', (finish - start).total_seconds() * 1000)
def optimize(self, context, image_extension, results):
for optimizer in context.modules.optimizers:
new_results = optimizer(context).run_optimizer(image_extension, results)
if new_results is not None:
results = new_results
return results
def reload_to_fit_in_kb(self, engine, initial_results, extension, initial_quality, max_bytes):
if extension not in ['.webp', '.jpg', '.jpeg'] or len(initial_results) <= max_bytes:
return initial_results
results = initial_results
quality = initial_quality
while len(results) > max_bytes:
quality = int(quality * 0.75)
if quality < 10:
logger.debug('Could not find any reduction that matches required size of %d bytes.' % max_bytes)
return initial_results
logger.debug('Trying to downsize image with quality of %d...' % quality)
results = engine.read(extension, quality)
prev_result = results
while len(results) <= max_bytes and quality < initial_quality:
quality = max(initial_quality, int(quality * 1.1))
logger.debug('Trying to upsize image with quality of %d...' % quality)
prev_result = results
results = engine.read(extension, quality)
return prev_result
@classmethod
def translate_crop_coordinates(
cls,
original_width,
original_height,
width,
height,
crop_left,
crop_top,
crop_right,
crop_bottom):
if original_width == width and original_height == height:
return
crop_left = crop_left * width / original_width
crop_top = crop_top * height / original_height
crop_right = crop_right * width / original_width
crop_bottom = crop_bottom * height / original_height
return (crop_left, crop_top, crop_right, crop_bottom)
def validate(self, path):
if not hasattr(self.context.modules.loader, 'validate'):
return True
is_valid = self.context.modules.loader.validate(self.context, path)
if not is_valid:
logger.warn('Request denied because the specified path "%s" was not identified by the loader as a valid path' % path)
return is_valid
@gen.coroutine
def _fetch(self, url):
"""
:param url:
:type url:
:return:
:rtype:
"""
fetch_result = FetchResult()
storage = self.context.modules.storage
yield self.acquire_url_lock(url)
try:
fetch_result.buffer = yield gen.maybe_future(storage.get(url))
mime = None
if fetch_result.buffer is not None:
self.release_url_lock(url)
fetch_result.successful = True
self.context.metrics.incr('storage.hit')
mime = BaseEngine.get_mimetype(fetch_result.buffer)
self.context.request.extension = EXTENSION.get(mime, '.jpg')
if mime == 'image/gif' and self.context.config.USE_GIFSICLE_ENGINE:
self.context.request.engine = self.context.modules.gif_engine
else:
self.context.request.engine = self.context.modules.engine
raise gen.Return(fetch_result)
else:
self.context.metrics.incr('storage.miss')
loader_result = yield self.context.modules.loader.load(self.context, url)
finally:
self.release_url_lock(url)
if isinstance(loader_result, LoaderResult):
# TODO _fetch should probably return a result object vs a list to
# to allow returning metadata
if not loader_result.successful:
fetch_result.buffer = None
fetch_result.loader_error = loader_result.error
raise gen.Return(fetch_result)
fetch_result.buffer = loader_result.buffer
else:
# Handle old loaders
fetch_result.buffer = loader_result
if fetch_result.buffer is None:
raise gen.Return(fetch_result)
fetch_result.successful = True
if mime is None:
mime = BaseEngine.get_mimetype(fetch_result.buffer)
self.context.request.extension = extension = EXTENSION.get(mime, '.jpg')
try:
if mime == 'image/gif' and self.context.config.USE_GIFSICLE_ENGINE:
self.context.request.engine = self.context.modules.gif_engine
else:
self.context.request.engine = self.context.modules.engine
self.context.request.engine.load(fetch_result.buffer, extension)
if self.context.request.engine.image is None:
fetch_result.successful = False
fetch_result.buffer = None
fetch_result.engine = self.context.request.engine
fetch_result.engine_error = EngineResult.COULD_NOT_LOAD_IMAGE
raise gen.Return(fetch_result)
fetch_result.normalized = self.context.request.engine.normalize()
# Allows engine or loader to override storage on the fly for the purpose of
# marking a specific file as unstoreable
storage = self.context.modules.storage
is_no_storage = isinstance(storage, NoStorage)
is_mixed_storage = isinstance(storage, MixedStorage)
is_mixed_no_file_storage = is_mixed_storage and isinstance(storage.file_storage, NoStorage)
if not (is_no_storage or is_mixed_no_file_storage):
storage.put(url, fetch_result.buffer)
storage.put_crypto(url)
except Exception:
fetch_result.successful = False
finally:
if not fetch_result.successful:
raise
fetch_result.buffer = None
fetch_result.engine = self.context.request.engine
raise gen.Return(fetch_result)
@gen.coroutine
def get_blacklist_contents(self):
filename = 'blacklist.txt'
exists = yield gen.maybe_future(self.context.modules.storage.exists(filename))
if exists:
blacklist = yield gen.maybe_future(self.context.modules.storage.get(filename))
raise tornado.gen.Return(blacklist)
else:
raise tornado.gen.Return("")
@gen.coroutine
def acquire_url_lock(self, url):
if url not in BaseHandler.url_locks:
BaseHandler.url_locks[url] = Condition()
else:
yield BaseHandler.url_locks[url].wait()
def release_url_lock(self, url):
try:
BaseHandler.url_locks[url].notify_all()
del BaseHandler.url_locks[url]
except KeyError:
pass
|
thumbor/thumbor | thumbor/url_composer.py | main | python | def main(arguments=None):
'''Converts a given url with the specified arguments.'''
parsed_options, arguments = get_options(arguments)
image_url = arguments[0]
image_url = quote(image_url)
try:
config = Config.load(None)
except Exception:
config = None
if not parsed_options.key and not config:
sys.stdout.write('Error: The -k or --key argument is mandatory. For more information type thumbor-url -h\n')
return
security_key, thumbor_params = get_thumbor_params(image_url, parsed_options, config)
crypto = CryptoURL(key=security_key)
url = crypto.generate(**thumbor_params)
sys.stdout.write('URL:\n')
sys.stdout.write('%s\n' % url)
return url | Converts a given url with the specified arguments. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/url_composer.py#L159-L183 | [
"def get_options(arguments):\n if arguments is None:\n arguments = sys.argv[1:]\n\n parser = get_parser()\n\n (parsed_options, arguments) = parser.parse_args(arguments)\n\n if not arguments:\n sys.stdout.write('Error: The image argument is mandatory. For more information type thumbor-url -... | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
import sys
import optparse
from six.moves.urllib.parse import quote
from thumbor import __version__
from libthumbor import CryptoURL
from thumbor.config import Config
def get_parser():
parser = optparse.OptionParser(
usage='thumbor-url [options] imageurl or type thumbor-url -h (--help) for help',
description=__doc__,
version=__version__
)
parser.add_option(
'-l', '--key_file', dest='key_file', default=None, help='The file to read the security key from [default: %default].'
)
parser.add_option(
'-k', '--key', dest='key', default=None, help='The security key to encrypt the url with [default: %default].'
)
parser.add_option(
'-w', '--width', dest='width', type='int', default=0, help='The target width for the image [default: %default].'
)
parser.add_option(
'-e', '--height', dest='height', type='int', default=0, help='The target height for the image [default: %default].'
)
parser.add_option(
'-n', '--fitin', dest='fitin', action='store_true', default=False,
help='Indicates that fit-in resizing should be performed.'
)
parser.add_option(
'-m', '--meta', dest='meta', action='store_true', default=False,
help='Indicates that meta information should be retrieved.'
)
parser.add_option(
'', '--adaptive', action='store_true', dest='adaptive', default=False,
help='Indicates that adaptive fit-in cropping should be used.'
)
parser.add_option(
'', '--full', action='store_true', dest='full', default=False, help='Indicates that fit-full cropping should be used.'
)
parser.add_option(
'-s', '--smart', action='store_true', dest='smart', default=False, help='Indicates that smart cropping should be used.'
)
parser.add_option(
'-t', '--trim', action='store_true', default=False, help='Indicate that surrounding whitespace should be trimmed.'
)
parser.add_option(
'-f', '--horizontal-flip', action='store_true', dest='horizontal_flip', default=False,
help='Indicates that the image should be horizontally flipped.'
)
parser.add_option(
'-v', '--vertical-flip', action='store_true', dest='vertical_flip', default=False,
help='Indicates that the image should be vertically flipped.'
)
parser.add_option(
'-a', '--halign', dest='halign', default='center',
help='The horizontal alignment to use for cropping [default: %default].'
)
parser.add_option(
'-i', '--valign', dest='valign', default='middle',
help='The vertical alignment to use for cropping [default: %default].'
)
parser.add_option(
'', '--filters', dest='filters', action='append',
help='Filters to be applied to the image, e.g. brightness(10).'
)
parser.add_option(
'-o', '--old-format', dest='old', action='store_true', default=False,
help='Indicates that thumbor should generate old-format urls [default: %default].'
)
parser.add_option(
'-c', '--crop', dest='crop', default=None,
help='The coordinates of the points to manual cropping in the format leftxtop:rightxbottom '
'(100x200:400x500) [default: %default].'
)
return parser
def get_options(arguments):
if arguments is None:
arguments = sys.argv[1:]
parser = get_parser()
(parsed_options, arguments) = parser.parse_args(arguments)
if not arguments:
sys.stdout.write('Error: The image argument is mandatory. For more information type thumbor-url -h\n')
return
return parsed_options, arguments
def get_thumbor_params(image_url, params, config):
    """Build the security key and the thumbor options dict for a URL request.

    :param image_url: URL of the source image.
    :param params: parsed CLI options object (see ``get_parser()``).
    :param config: thumbor config object providing ``SECURITY_KEY``.
    :return: ``(security_key, options)`` tuple.
    """
    # Key precedence: --key-file > explicit --key > config SECURITY_KEY.
    if params.key_file:
        # Context manager guarantees the handle is closed even on read errors
        # (the original left the file open if read() raised).
        with open(params.key_file) as key_file:
            security_key = key_file.read().strip()
    else:
        security_key = config.SECURITY_KEY if not params.key else params.key
    # Manual crop is given as "leftxtop:rightxbottom", e.g. 100x200:400x500.
    # Note: parsed values stay strings, as callers historically received them.
    crop_left = crop_top = crop_right = crop_bottom = 0
    if params.crop:
        crops = params.crop.split(':')
        crop_left, crop_top = crops[0].split('x')
        crop_right, crop_bottom = crops[1].split('x')
    options = {
        'old': params.old,
        'width': params.width,
        'height': params.height,
        'smart': params.smart,
        'meta': params.meta,
        'horizontal_flip': params.horizontal_flip,
        'vertical_flip': params.vertical_flip,
        'halign': params.halign,
        'valign': params.valign,
        'trim': params.trim,
        'crop_left': crop_left,
        'crop_top': crop_top,
        'crop_right': crop_right,
        'crop_bottom': crop_bottom,
        'filters': params.filters,
        'image_url': image_url,
        'fit_in': False,
        'full_fit_in': False,
        'adaptive_fit_in': False,
        'adaptive_full_fit_in': False,
    }
    # At most one fit-in flavour is enabled; the most specific combination wins.
    if params.fitin and params.full and params.adaptive:
        options['adaptive_full_fit_in'] = True
    elif params.fitin and params.full:
        options['full_fit_in'] = True
    elif params.fitin and params.adaptive:
        options['adaptive_fit_in'] = True
    elif params.fitin:
        options['fit_in'] = True
    return security_key, options
# Script entry point: forward the command-line args (sans program name) to main().
if __name__ == '__main__':
    main(sys.argv[1:])
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | checkImages | python | def checkImages(images):
# Init results
images2 = []
for im in images:
if PIL and isinstance(im, PIL.Image.Image):
# We assume PIL images are allright
images2.append(im)
elif np and isinstance(im, np.ndarray):
# Check and convert dtype
if im.dtype == np.uint8:
images2.append(im) # Ok
elif im.dtype in [np.float32, np.float64]:
im = im.copy()
im[im < 0] = 0
im[im > 1] = 1
im *= 255
images2.append(im.astype(np.uint8))
else:
im = im.astype(np.uint8)
images2.append(im)
# Check size
if im.ndim == 2:
pass # ok
elif im.ndim == 3:
if im.shape[2] not in [3, 4]:
raise ValueError('This array can not represent an image.')
else:
raise ValueError('This array can not represent an image.')
else:
raise ValueError('Invalid image type: ' + str(type(im)))
# Done
return images2 | checkImages(images)
Check numpy images and correct intensity range etc.
The same for all movie formats. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L91-L129 | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein, Ant1, Marius van Voorden
#
# This code is subject to the (new) BSD license:
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" Module images2gif
Provides functionality for reading and writing animated GIF images.
Use writeGif to write a series of numpy arrays or PIL images as an
animated GIF. Use readGif to read an animated gif as a series of numpy
arrays.
Note that since July 2004, all patents on the LZW compression patent have
expired. Therefore the GIF format may now be used freely.
Acknowledgements
----------------
Many thanks to Ant1 for:
* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly
improves the results.
* the modifications to save each image with its own palette, or optionally
the global palette (if its the same).
Many thanks to Marius van Voorden for porting the NeuQuant quantization
algorithm of Anthony Dekker to Python (See the NeuQuant class for its
license).
Many thanks to Alex Robinson for implementing the concept of subrectangles,
which (depening on image content) can give a very significant reduction in
file size.
This code is based on gifmaker (in the scripts folder of the source
distribution of PIL)
Usefull links
-------------
* http://tronche.com/computer-graphics/gif/
* http://en.wikipedia.org/wiki/Graphics_Interchange_Format
* http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
# todo: This module should be part of imageio (or at least based on)
from __future__ import absolute_import
import os
try:
import PIL
from PIL import Image
from PIL.GifImagePlugin import getheader, getdata
except ImportError:
PIL = None
try:
import numpy as np
except ImportError:
np = None
# getheader gives a 87a header and a color palette (two elements in a list).
# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size".
# getdatas()[1:] is the image data itself in chuncks of 256 bytes (well
# technically the first byte says how many bytes follow, after which that
# amount (max 255) follows).
def intToBin(i):
    """ Integer to two bytes """
    # Little-endian: low byte first, then high byte.
    low_byte = i % 256
    high_byte = int(i / 256)
    return chr(low_byte) + chr(high_byte)
class GifWriter:
    """ GifWriter()
    Class that contains methods for helping write the animated GIF file.
    NOTE(review): byte sequences are assembled as native str via chr()
    (a Python 2 idiom); on Python 3 the output stream would need bytes --
    confirm the supported interpreter before reuse.
    """
    def getheaderAnim(self, im):
        """ getheaderAnim(im)
        Get animation header. To replace PILs getheader()[0]
        """
        bb = "GIF89a"
        bb += intToBin(im.size[0])
        bb += intToBin(im.size[1])
        bb += "\x87\x00\x00"
        return bb
    def getImageDescriptor(self, im, xy=None):
        """ getImageDescriptor(im, xy=None)
        Used for the local color table properties per image.
        Otherwise global color table applies to all frames irrespective of
        whether additional colors comes in play that require a redefined
        palette. Still a maximum of 256 color per frame, obviously.
        Written by Ant1 on 2010-08-22
        Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
        """
        # Defaule use full image and place at upper left
        if xy is None:
            xy = (0, 0)
        # Image separator,
        bb = '\x2C'
        # Image position and size
        bb += intToBin(xy[0]) # Left position
        bb += intToBin(xy[1]) # Top position
        bb += intToBin(im.size[0]) # image width
        bb += intToBin(im.size[1]) # image height
        # packed field: local color table flag1, interlace0, sorted table0,
        # reserved00, lct size111=7=2^(7+1)=256.
        bb += '\x87'
        # LZW minimum size code now comes later, begining of [image data] blocks
        return bb
    def getAppExt(self, loops=float('inf')):
        """ getAppExt(loops=float('inf'))
        Application extention. This part specifies the amount of loops.
        If loops is 0 or inf, it goes on infinitely.
        """
        if loops == 0 or loops == float('inf'):
            loops = 2 ** 16 - 1
        # bb = "" application extension should not be used
        # (the extension interprets zero loops
        # to mean an infinite number of loops)
        # Mmm, does not seem to work
        if True:
            bb = "\x21\xFF\x0B" # application extension
            bb += "NETSCAPE2.0"
            bb += "\x03\x01"
            bb += intToBin(loops)
            bb += '\x00' # end
        return bb
    def getGraphicsControlExt(self, duration=0.1, dispose=2):
        """ getGraphicsControlExt(duration=0.1, dispose=2)
        Graphics Control Extension. A sort of header at the start of
        each image. Specifies duration and transparancy.
        Dispose
        -------
        * 0 - No disposal specified.
        * 1 - Do not dispose. The graphic is to be left in place.
        * 2 - Restore to background color. The area used by the graphic
        must be restored to the background color.
        * 3 - Restore to previous. The decoder is required to restore the
        area overwritten by the graphic with what was there prior to
        rendering the graphic.
        * 4-7 -To be defined.
        """
        bb = '\x21\xF9\x04'
        bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
        # 2nd bit 1 == user input , next 3 bits, the low two of which are used,
        # are dispose.
        bb += intToBin(int(duration * 100)) # in 100th of seconds
        bb += '\x00' # no transparant color
        bb += '\x00' # end
        return bb
    def handleSubRectangles(self, images, subRectangles):
        """ handleSubRectangles(images)
        Handle the sub-rectangle stuff. If the rectangles are given by the
        user, the values are checked. Otherwise the subrectangles are
        calculated automatically.
        """
        if isinstance(subRectangles, (tuple, list)):
            # xy given directly
            # Check xy
            xy = subRectangles
            # NOTE(review): xy is a tuple/list at this point, so this
            # None-check looks unreachable.
            if xy is None:
                xy = (0, 0)
            if hasattr(xy, '__len__'):
                if len(xy) == len(images):
                    xy = [xxyy for xxyy in xy]
                else:
                    raise ValueError("len(xy) doesn't match amount of images.")
            else:
                xy = [xy for im in images]
            xy[0] = (0, 0)
        else:
            # Calculate xy using some basic image processing
            # Check Numpy
            if np is None:
                raise RuntimeError("Need Numpy to use auto-subRectangles.")
            # First make numpy arrays if required
            for i in range(len(images)):
                im = images[i]
                if isinstance(im, Image.Image):
                    tmp = im.convert() # Make without palette
                    a = np.asarray(tmp)
                    if len(a.shape) == 0:
                        raise MemoryError("Too little memory to convert PIL image to array")
                    images[i] = a
            # Determine the sub rectangles
            images, xy = self.getSubRectangles(images)
        # Done
        return images, xy
    def getSubRectangles(self, ims):
        """ getSubRectangles(ims)
        Calculate the minimal rectangles that need updating each frame.
        Returns a two-element tuple containing the cropped images and a
        list of x-y positions.
        Calculating the subrectangles takes extra time, obviously. However,
        if the image sizes were reduced, the actual writing of the GIF
        goes faster. In some cases applying this method produces a GIF faster.
        """
        # Check image count
        if len(ims) < 2:
            return ims, [(0, 0) for i in ims]
        # We need numpy
        if np is None:
            raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
        # Prepare
        ims2 = [ims[0]]
        xy = [(0, 0)]
        # t0 = time.time()
        # Iterate over images
        prev = ims[0]
        for im in ims[1:]:
            # Get difference, sum over colors
            # NOTE(review): if frames are uint8 arrays this subtraction can
            # wrap around instead of yielding an absolute difference -- verify.
            diff = np.abs(im - prev)
            if diff.ndim == 3:
                diff = diff.sum(2)
            # Get begin and end for both dimensions
            X = np.argwhere(diff.sum(0))
            Y = np.argwhere(diff.sum(1))
            # Get rect coordinates
            if X.size and Y.size:
                x0, x1 = X[0], X[-1] + 1
                y0, y1 = Y[0], Y[-1] + 1
            else: # No change ... make it minimal
                x0, x1 = 0, 2
                y0, y1 = 0, 2
            # Cut out and store
            im2 = im[y0:y1, x0:x1]
            prev = im
            ims2.append(im2)
            xy.append((x0, y0))
        # Done
        # print('%1.2f seconds to determine subrectangles of %i images' %
        # (time.time()-t0, len(ims2)) )
        return ims2, xy
    def convertImagesToPIL(self, images, dither, nq=0):
        """ convertImagesToPIL(images, nq=0)
        Convert images to Paletted PIL images, which can then be
        written to a single animaged GIF.
        """
        # Convert to PIL images
        images2 = []
        for im in images:
            if isinstance(im, Image.Image):
                images2.append(im)
            elif np and isinstance(im, np.ndarray):
                if im.ndim == 3 and im.shape[2] == 3:
                    im = Image.fromarray(im, 'RGB')
                elif im.ndim == 3 and im.shape[2] == 4:
                    # RGBA input: the alpha channel is dropped here.
                    im = Image.fromarray(im[:, :, :3], 'RGB')
                elif im.ndim == 2:
                    im = Image.fromarray(im, 'L')
                images2.append(im)
        # Convert to paletted PIL images
        images, images2 = images2, []
        # Adaptive PIL algorithm
        # NOTE(review): the `nq` (NeuQuant) parameter is accepted but unused
        # in this version; quantization always uses PIL's adaptive palette.
        AD = Image.ADAPTIVE
        for im in images:
            im = im.convert('P', palette=AD, dither=dither)
            images2.append(im)
        # Done
        return images2
    def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
        """ writeGifToFile(fp, images, durations, loops, xys, disposes)
        Given a set of images writes the bytes to the specified stream.
        """
        # Obtain palette for all images and count each occurance
        palettes, occur = [], []
        for im in images:
            header, usedPaletteColors = getheader(im)
            palettes.append(header[-1]) # Last part of the header is the frame palette
        for palette in palettes:
            occur.append(palettes.count(palette))
        # Select most-used palette as the global one (or first in case no max)
        globalPalette = palettes[occur.index(max(occur))]
        # Init
        frames = 0
        firstFrame = True
        for im, palette in zip(images, palettes):
            if firstFrame:
                # Write header
                # Gather info
                header = self.getheaderAnim(im)
                appext = self.getAppExt(loops)
                # Write
                fp.write(header)
                fp.write(globalPalette)
                fp.write(appext)
                # Next frame is not the first
                firstFrame = False
            if True:
                # Write palette and image data
                # Gather info
                data = getdata(im)
                # Split getdata(): head items form the image descriptor,
                # the last two items are the LZW-compressed pixel data.
                imdes, data = b''.join(data[:-2]), data[-2:]
                graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
                # Make image descriptor suitable for using 256 local color palette
                lid = self.getImageDescriptor(im, xys[frames])
                # Write local header
                if (palette != globalPalette) or (disposes[frames] != 2):
                    # Use local color palette
                    fp.write(graphext)
                    fp.write(lid) # write suitable image descriptor
                    fp.write(palette) # write local color table
                    fp.write('\x08') # LZW minimum size code
                else:
                    # Use global color palette
                    fp.write(graphext)
                    fp.write(imdes) # write suitable image descriptor
                for d in data:
                    fp.write(d)
            # Prepare for next round
            frames = frames + 1
        fp.write(";") # end gif
        return frames
# Exposed functions
def writeGif(
filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None):
""" writeGif(filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None)
Write an animated gif from the specified images.
Parameters
----------
filename : string
The name of the file to write the image to.
images : list
Should be a list consisting of PIL images or numpy arrays.
The latter should be between 0 and 255 for integer types, and
between 0 and 1 for float types.
duration : scalar or list of scalars
The duration for all frames, or (if a list) for each frame.
repeat : bool or integer
The amount of loops. If True, loops infinitetely.
dither : bool
Whether to apply dithering
nq : integer
If nonzero, applies the NeuQuant quantization algorithm to create
the color palette. This algorithm is superior, but slower than
the standard PIL algorithm. The value of nq is the quality
parameter. 1 represents the best quality. 10 is in general a
good tradeoff between quality and speed. When using this option,
better results are usually obtained when subRectangles is False.
subRectangles : False, True, or a list of 2-element tuples
Whether to use sub-rectangles. If True, the minimal rectangle that
is required to update each frame is automatically detected. This
can give significant reductions in file size, particularly if only
a part of the image changes. One can also give a list of x-y
coordinates if you want to do the cropping yourself. The default
is True.
dispose : int
How to dispose each frame. 1 means that each frame is to be left
in place. 2 means the background color should be restored after
each frame. 3 means the decoder should restore the previous frame.
If subRectangles==False, the default is 2, otherwise it is 1.
"""
# Check PIL
if PIL is None:
raise RuntimeError("Need PIL to write animated gif files.")
# Check images
images = checkImages(images)
# Instantiate writer object
gifWriter = GifWriter()
# Check loops
if repeat is False:
loops = 1
elif repeat is True:
loops = 0 # zero means infinite
else:
loops = int(repeat)
# Check duration
if hasattr(duration, '__len__'):
if len(duration) == len(images):
duration = [d for d in duration]
else:
raise ValueError("len(duration) doesn't match amount of images.")
else:
duration = [duration for im in images]
# Check subrectangles
if subRectangles:
images, xy = gifWriter.handleSubRectangles(images, subRectangles)
defaultDispose = 1 # Leave image in place
else:
# Normal mode
xy = [(0, 0) for im in images]
defaultDispose = 2 # Restore to background color.
# Check dispose
if dispose is None:
dispose = defaultDispose
if hasattr(dispose, '__len__'):
if len(dispose) != len(images):
raise ValueError("len(xy) doesn't match amount of images.")
else:
dispose = [dispose for im in images]
# Make images in a format that we can write easy
images = gifWriter.convertImagesToPIL(images, dither, nq)
# Write
fp = open(filename, 'wb')
try:
gifWriter.writeGifToFile(fp, images, duration, loops, xy, dispose)
finally:
fp.close()
def readGif(filename, asNumpy=True):
""" readGif(filename, asNumpy=True)
Read images from an animated GIF file. Returns a list of numpy
arrays, or, if asNumpy is false, a list if PIL images.
"""
# Check PIL
if PIL is None:
raise RuntimeError("Need PIL to read animated gif files.")
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to read animated gif files.")
# Check whether it exists
if not os.path.isfile(filename):
raise IOError('File not found: ' + str(filename))
# Load file using PIL
pilIm = PIL.Image.open(filename)
pilIm.seek(0)
# Read all images inside
images = []
try:
while True:
# Get image as numpy array
tmp = pilIm.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
# Store, and next
images.append(a)
pilIm.seek(pilIm.tell() + 1)
except EOFError:
pass
# Convert to normal PIL images if needed
if not asNumpy:
images2 = images
images = []
for im in images2:
images.append(PIL.Image.fromarray(im))
# Done
return images
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | intToBin | python | def intToBin(i):
# devide in two parts (bytes)
i1 = i % 256
i2 = int(i / 256)
# make string (little endian)
return chr(i1) + chr(i2) | Integer to two bytes | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L132-L138 | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein, Ant1, Marius van Voorden
#
# This code is subject to the (new) BSD license:
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" Module images2gif
Provides functionality for reading and writing animated GIF images.
Use writeGif to write a series of numpy arrays or PIL images as an
animated GIF. Use readGif to read an animated gif as a series of numpy
arrays.
Note that since July 2004, all patents on the LZW compression patent have
expired. Therefore the GIF format may now be used freely.
Acknowledgements
----------------
Many thanks to Ant1 for:
* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly
improves the results.
* the modifications to save each image with its own palette, or optionally
the global palette (if its the same).
Many thanks to Marius van Voorden for porting the NeuQuant quantization
algorithm of Anthony Dekker to Python (See the NeuQuant class for its
license).
Many thanks to Alex Robinson for implementing the concept of subrectangles,
which (depening on image content) can give a very significant reduction in
file size.
This code is based on gifmaker (in the scripts folder of the source
distribution of PIL)
Usefull links
-------------
* http://tronche.com/computer-graphics/gif/
* http://en.wikipedia.org/wiki/Graphics_Interchange_Format
* http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
# todo: This module should be part of imageio (or at least based on)
from __future__ import absolute_import
import os
try:
import PIL
from PIL import Image
from PIL.GifImagePlugin import getheader, getdata
except ImportError:
PIL = None
try:
import numpy as np
except ImportError:
np = None
# getheader gives a 87a header and a color palette (two elements in a list).
# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size".
# getdatas()[1:] is the image data itself in chuncks of 256 bytes (well
# technically the first byte says how many bytes follow, after which that
# amount (max 255) follows).
def checkImages(images):
""" checkImages(images)
Check numpy images and correct intensity range etc.
The same for all movie formats.
"""
# Init results
images2 = []
for im in images:
if PIL and isinstance(im, PIL.Image.Image):
# We assume PIL images are allright
images2.append(im)
elif np and isinstance(im, np.ndarray):
# Check and convert dtype
if im.dtype == np.uint8:
images2.append(im) # Ok
elif im.dtype in [np.float32, np.float64]:
im = im.copy()
im[im < 0] = 0
im[im > 1] = 1
im *= 255
images2.append(im.astype(np.uint8))
else:
im = im.astype(np.uint8)
images2.append(im)
# Check size
if im.ndim == 2:
pass # ok
elif im.ndim == 3:
if im.shape[2] not in [3, 4]:
raise ValueError('This array can not represent an image.')
else:
raise ValueError('This array can not represent an image.')
else:
raise ValueError('Invalid image type: ' + str(type(im)))
# Done
return images2
class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
# Exposed functions
def writeGif(
        filename, images, duration=0.1, repeat=True, dither=False,
        nq=0, subRectangles=True, dispose=None):
    """ writeGif(filename, images, duration=0.1, repeat=True, dither=False,
    nq=0, subRectangles=True, dispose=None)
    Write an animated gif from the specified images.
    Parameters
    ----------
    filename : string
        The name of the file to write the image to.
    images : list
        Should be a list consisting of PIL images or numpy arrays.
        The latter should be between 0 and 255 for integer types, and
        between 0 and 1 for float types.
    duration : scalar or list of scalars
        The duration for all frames, or (if a list) for each frame.
    repeat : bool or integer
        The amount of loops. If True, loops infinitetely.
    dither : bool
        Whether to apply dithering
    nq : integer
        If nonzero, applies the NeuQuant quantization algorithm to create
        the color palette. This algorithm is superior, but slower than
        the standard PIL algorithm. The value of nq is the quality
        parameter. 1 represents the best quality. 10 is in general a
        good tradeoff between quality and speed. When using this option,
        better results are usually obtained when subRectangles is False.
    subRectangles : False, True, or a list of 2-element tuples
        Whether to use sub-rectangles. If True, the minimal rectangle that
        is required to update each frame is automatically detected. This
        can give significant reductions in file size, particularly if only
        a part of the image changes. One can also give a list of x-y
        coordinates if you want to do the cropping yourself. The default
        is True.
    dispose : int
        How to dispose each frame. 1 means that each frame is to be left
        in place. 2 means the background color should be restored after
        each frame. 3 means the decoder should restore the previous frame.
        If subRectangles==False, the default is 2, otherwise it is 1.
    """
    # Check PIL
    if PIL is None:
        raise RuntimeError("Need PIL to write animated gif files.")
    # Normalize frames (dtype/range); see checkImages().
    images = checkImages(images)
    # Instantiate writer object
    gifWriter = GifWriter()
    # Map `repeat` onto the GIF loop count (0 means loop forever).
    if repeat is False:
        loops = 1
    elif repeat is True:
        loops = 0  # zero means infinite
    else:
        loops = int(repeat)
    # Per-frame durations: broadcast a scalar, validate a sequence length.
    if hasattr(duration, '__len__'):
        if len(duration) == len(images):
            duration = [d for d in duration]
        else:
            raise ValueError("len(duration) doesn't match amount of images.")
    else:
        duration = [duration for im in images]
    # Check subrectangles
    if subRectangles:
        images, xy = gifWriter.handleSubRectangles(images, subRectangles)
        defaultDispose = 1  # Leave image in place
    else:
        # Normal mode
        xy = [(0, 0) for im in images]
        defaultDispose = 2  # Restore to background color.
    # Per-frame dispose values: broadcast a scalar, validate a sequence length.
    if dispose is None:
        dispose = defaultDispose
    if hasattr(dispose, '__len__'):
        if len(dispose) != len(images):
            # BUGFIX: the message used to say "len(xy)" for a bad dispose list.
            raise ValueError("len(dispose) doesn't match amount of images.")
    else:
        dispose = [dispose for im in images]
    # Make images in a format that we can write easy
    images = gifWriter.convertImagesToPIL(images, dither, nq)
    # Write; the context manager closes the file even if writing fails.
    with open(filename, 'wb') as fp:
        gifWriter.writeGifToFile(fp, images, duration, loops, xy, dispose)
def readGif(filename, asNumpy=True):
    """ readGif(filename, asNumpy=True)
    Read the frames of an animated GIF file.

    Returns a list of numpy arrays or, when asNumpy is false, a list
    of PIL images.
    """
    # Validate runtime dependencies before touching the file.
    if PIL is None:
        raise RuntimeError("Need PIL to read animated gif files.")
    if np is None:
        raise RuntimeError("Need Numpy to read animated gif files.")
    if not os.path.isfile(filename):
        raise IOError('File not found: ' + str(filename))

    # Open the GIF and rewind to the first frame.
    pilIm = PIL.Image.open(filename)
    pilIm.seek(0)

    # Walk the frames until PIL signals the end of the sequence with
    # an EOFError from seek().
    frames = []
    try:
        while True:
            converted = pilIm.convert()  # Make without palette
            arr = np.asarray(converted)
            if len(arr.shape) == 0:
                raise MemoryError("Too little memory to convert PIL image to array")
            frames.append(arr)
            pilIm.seek(pilIm.tell() + 1)
    except EOFError:
        pass

    # Convert back to PIL images when arrays were not requested.
    if not asNumpy:
        frames = [PIL.Image.fromarray(arr) for arr in frames]
    return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | writeGif | python | def writeGif(
filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None):
# Check PIL
if PIL is None:
raise RuntimeError("Need PIL to write animated gif files.")
# Check images
images = checkImages(images)
# Instantiate writer object
gifWriter = GifWriter()
# Check loops
if repeat is False:
loops = 1
elif repeat is True:
loops = 0 # zero means infinite
else:
loops = int(repeat)
# Check duration
if hasattr(duration, '__len__'):
if len(duration) == len(images):
duration = [d for d in duration]
else:
raise ValueError("len(duration) doesn't match amount of images.")
else:
duration = [duration for im in images]
# Check subrectangles
if subRectangles:
images, xy = gifWriter.handleSubRectangles(images, subRectangles)
defaultDispose = 1 # Leave image in place
else:
# Normal mode
xy = [(0, 0) for im in images]
defaultDispose = 2 # Restore to background color.
# Check dispose
if dispose is None:
dispose = defaultDispose
if hasattr(dispose, '__len__'):
if len(dispose) != len(images):
raise ValueError("len(xy) doesn't match amount of images.")
else:
dispose = [dispose for im in images]
# Make images in a format that we can write easy
images = gifWriter.convertImagesToPIL(images, dither, nq)
# Write
fp = open(filename, 'wb')
try:
gifWriter.writeGifToFile(fp, images, duration, loops, xy, dispose)
finally:
fp.close() | writeGif(filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None)
Write an animated gif from the specified images.
Parameters
----------
filename : string
The name of the file to write the image to.
images : list
Should be a list consisting of PIL images or numpy arrays.
The latter should be between 0 and 255 for integer types, and
between 0 and 1 for float types.
duration : scalar or list of scalars
The duration for all frames, or (if a list) for each frame.
repeat : bool or integer
The amount of loops. If True, loops infinitetely.
dither : bool
Whether to apply dithering
nq : integer
If nonzero, applies the NeuQuant quantization algorithm to create
the color palette. This algorithm is superior, but slower than
the standard PIL algorithm. The value of nq is the quality
parameter. 1 represents the best quality. 10 is in general a
good tradeoff between quality and speed. When using this option,
better results are usually obtained when subRectangles is False.
subRectangles : False, True, or a list of 2-element tuples
Whether to use sub-rectangles. If True, the minimal rectangle that
is required to update each frame is automatically detected. This
can give significant reductions in file size, particularly if only
a part of the image changes. One can also give a list of x-y
coordinates if you want to do the cropping yourself. The default
is True.
dispose : int
How to dispose each frame. 1 means that each frame is to be left
in place. 2 means the background color should be restored after
each frame. 3 means the decoder should restore the previous frame.
If subRectangles==False, the default is 2, otherwise it is 1. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L454-L551 | [
"def checkImages(images):\n \"\"\" checkImages(images)\n Check numpy images and correct intensity range etc.\n The same for all movie formats.\n \"\"\"\n # Init results\n images2 = []\n\n for im in images:\n if PIL and isinstance(im, PIL.Image.Image):\n # We assume PIL images ... | # -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein, Ant1, Marius van Voorden
#
# This code is subject to the (new) BSD license:
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" Module images2gif
Provides functionality for reading and writing animated GIF images.
Use writeGif to write a series of numpy arrays or PIL images as an
animated GIF. Use readGif to read an animated gif as a series of numpy
arrays.
Note that since July 2004, all patents on the LZW compression patent have
expired. Therefore the GIF format may now be used freely.
Acknowledgements
----------------
Many thanks to Ant1 for:
* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly
improves the results.
* the modifications to save each image with its own palette, or optionally
the global palette (if its the same).
Many thanks to Marius van Voorden for porting the NeuQuant quantization
algorithm of Anthony Dekker to Python (See the NeuQuant class for its
license).
Many thanks to Alex Robinson for implementing the concept of subrectangles,
which (depening on image content) can give a very significant reduction in
file size.
This code is based on gifmaker (in the scripts folder of the source
distribution of PIL)
Usefull links
-------------
* http://tronche.com/computer-graphics/gif/
* http://en.wikipedia.org/wiki/Graphics_Interchange_Format
* http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
# todo: This module should be part of imageio (or at least based on)
from __future__ import absolute_import
import os
try:
import PIL
from PIL import Image
from PIL.GifImagePlugin import getheader, getdata
except ImportError:
PIL = None
try:
import numpy as np
except ImportError:
np = None
# getheader gives a 87a header and a color palette (two elements in a list).
# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size".
# getdatas()[1:] is the image data itself in chuncks of 256 bytes (well
# technically the first byte says how many bytes follow, after which that
# amount (max 255) follows).
def checkImages(images):
    """ checkImages(images)
    Check numpy images and correct intensity range etc.
    The same for all movie formats.
    """
    checked = []
    for im in images:
        if PIL and isinstance(im, PIL.Image.Image):
            # PIL images are accepted as-is.
            checked.append(im)
        elif np and isinstance(im, np.ndarray):
            # Normalise the dtype to uint8.
            if im.dtype == np.uint8:
                checked.append(im)
            elif im.dtype in [np.float32, np.float64]:
                # Clip floats to [0, 1], then scale to the byte range.
                im = im.copy()
                im[im < 0] = 0
                im[im > 1] = 1
                im *= 255
                checked.append(im.astype(np.uint8))
            else:
                # Other dtypes are cast directly.
                im = im.astype(np.uint8)
                checked.append(im)
            # Only 2D (grayscale) or 3D with 3/4 channels is a valid image.
            if im.ndim == 3:
                if im.shape[2] not in [3, 4]:
                    raise ValueError('This array can not represent an image.')
            elif im.ndim != 2:
                raise ValueError('This array can not represent an image.')
        else:
            raise ValueError('Invalid image type: ' + str(type(im)))
    return checked
def intToBin(i):
    """ Pack a non-negative integer into two bytes, little endian. """
    # divmod yields (high byte, low byte) in one step.
    high, low = divmod(i, 256)
    return chr(low) + chr(high)
class GifWriter:
    """ GifWriter()
    Class that contains methods for helping write the animated GIF file.
    """

    def getheaderAnim(self, im):
        """ getheaderAnim(im)
        Get animation header. To replace PILs getheader()[0]
        """
        # GIF89a signature, then the logical screen size as two
        # little-endian 16-bit words, then packed fields / background
        # color index / pixel aspect ratio.
        bb = "GIF89a"
        bb += intToBin(im.size[0])
        bb += intToBin(im.size[1])
        bb += "\x87\x00\x00"
        return bb

    def getImageDescriptor(self, im, xy=None):
        """ getImageDescriptor(im, xy=None)
        Used for the local color table properties per image.
        Otherwise global color table applies to all frames irrespective of
        whether additional colors comes in play that require a redefined
        palette. Still a maximum of 256 color per frame, obviously.
        Written by Ant1 on 2010-08-22
        Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
        """
        # Default: use full image and place at upper left
        if xy is None:
            xy = (0, 0)
        # Image separator,
        bb = '\x2C'
        # Image position and size
        bb += intToBin(xy[0])  # Left position
        bb += intToBin(xy[1])  # Top position
        bb += intToBin(im.size[0])  # image width
        bb += intToBin(im.size[1])  # image height
        # packed field: local color table flag1, interlace0, sorted table0,
        # reserved00, lct size111=7=2^(7+1)=256.
        bb += '\x87'
        # LZW minimum size code now comes later, begining of [image data] blocks
        return bb

    def getAppExt(self, loops=float('inf')):
        """ getAppExt(loops=float('inf'))
        Application extention. This part specifies the amount of loops.
        If loops is 0 or inf, it goes on infinitely.
        """
        # "Infinite" is encoded here as the maximum 16-bit loop count.
        # NOTE(review): the NETSCAPE extension usually interprets a zero
        # loop count as infinite; this writes 65535 instead -- confirm.
        if loops == 0 or loops == float('inf'):
            loops = 2 ** 16 - 1
        # bb = "" application extension should not be used
        # (the extension interprets zero loops
        # to mean an infinite number of loops)
        # Mmm, does not seem to work
        if True:
            bb = "\x21\xFF\x0B"  # application extension
            bb += "NETSCAPE2.0"
            bb += "\x03\x01"
            bb += intToBin(loops)
            bb += '\x00'  # end
        return bb

    def getGraphicsControlExt(self, duration=0.1, dispose=2):
        """ getGraphicsControlExt(duration=0.1, dispose=2)
        Graphics Control Extension. A sort of header at the start of
        each image. Specifies duration and transparancy.
        Dispose
        -------
        * 0 - No disposal specified.
        * 1 - Do not dispose. The graphic is to be left in place.
        * 2 - Restore to background color. The area used by the graphic
          must be restored to the background color.
        * 3 - Restore to previous. The decoder is required to restore the
          area overwritten by the graphic with what was there prior to
          rendering the graphic.
        * 4-7 -To be defined.
        """
        bb = '\x21\xF9\x04'
        bb += chr((dispose & 3) << 2)  # low bit 1 == transparency,
        # 2nd bit 1 == user input , next 3 bits, the low two of which are used,
        # are dispose.
        bb += intToBin(int(duration * 100))  # in 100th of seconds
        bb += '\x00'  # no transparant color
        bb += '\x00'  # end
        return bb

    def handleSubRectangles(self, images, subRectangles):
        """ handleSubRectangles(images)
        Handle the sub-rectangle stuff. If the rectangles are given by the
        user, the values are checked. Otherwise the subrectangles are
        calculated automatically.
        """
        if isinstance(subRectangles, (tuple, list)):
            # xy given directly
            # Check xy
            xy = subRectangles
            if xy is None:
                xy = (0, 0)
            if hasattr(xy, '__len__'):
                if len(xy) == len(images):
                    xy = [xxyy for xxyy in xy]
                else:
                    raise ValueError("len(xy) doesn't match amount of images.")
            else:
                xy = [xy for im in images]
            # The first frame always covers the full screen.
            xy[0] = (0, 0)
        else:
            # Calculate xy using some basic image processing
            # Check Numpy
            if np is None:
                raise RuntimeError("Need Numpy to use auto-subRectangles.")
            # First make numpy arrays if required
            for i in range(len(images)):
                im = images[i]
                if isinstance(im, Image.Image):
                    tmp = im.convert()  # Make without palette
                    a = np.asarray(tmp)
                    if len(a.shape) == 0:
                        raise MemoryError("Too little memory to convert PIL image to array")
                    images[i] = a
            # Determine the sub rectangles
            images, xy = self.getSubRectangles(images)
        # Done
        return images, xy

    def getSubRectangles(self, ims):
        """ getSubRectangles(ims)
        Calculate the minimal rectangles that need updating each frame.
        Returns a two-element tuple containing the cropped images and a
        list of x-y positions.
        Calculating the subrectangles takes extra time, obviously. However,
        if the image sizes were reduced, the actual writing of the GIF
        goes faster. In some cases applying this method produces a GIF faster.
        """
        # Check image count
        if len(ims) < 2:
            return ims, [(0, 0) for i in ims]
        # We need numpy
        if np is None:
            raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
        # Prepare
        ims2 = [ims[0]]
        xy = [(0, 0)]
        # t0 = time.time()
        # Iterate over images
        prev = ims[0]
        for im in ims[1:]:
            # Get difference, sum over colors
            diff = np.abs(im - prev)
            if diff.ndim == 3:
                diff = diff.sum(2)
            # Get begin and end for both dimensions
            X = np.argwhere(diff.sum(0))
            Y = np.argwhere(diff.sum(1))
            # Get rect coordinates
            if X.size and Y.size:
                x0, x1 = X[0], X[-1] + 1
                y0, y1 = Y[0], Y[-1] + 1
            else:  # No change ... make it minimal
                x0, x1 = 0, 2
                y0, y1 = 0, 2
            # Cut out and store
            im2 = im[y0:y1, x0:x1]
            prev = im
            ims2.append(im2)
            xy.append((x0, y0))
        # Done
        # print('%1.2f seconds to determine subrectangles of %i images' %
        #     (time.time()-t0, len(ims2)) )
        return ims2, xy

    def convertImagesToPIL(self, images, dither, nq=0):
        """ convertImagesToPIL(images, nq=0)
        Convert images to Paletted PIL images, which can then be
        written to a single animaged GIF.
        """
        # Convert to PIL images
        images2 = []
        for im in images:
            if isinstance(im, Image.Image):
                images2.append(im)
            elif np and isinstance(im, np.ndarray):
                if im.ndim == 3 and im.shape[2] == 3:
                    im = Image.fromarray(im, 'RGB')
                elif im.ndim == 3 and im.shape[2] == 4:
                    # Alpha channel is dropped here.
                    im = Image.fromarray(im[:, :, :3], 'RGB')
                elif im.ndim == 2:
                    im = Image.fromarray(im, 'L')
                images2.append(im)
        # Convert to paletted PIL images
        images, images2 = images2, []
        # Adaptive PIL algorithm
        # NOTE(review): the nq parameter is never used in this body --
        # the NeuQuant quantization path appears to be absent.
        AD = Image.ADAPTIVE
        for im in images:
            im = im.convert('P', palette=AD, dither=dither)
            images2.append(im)
        # Done
        return images2

    def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
        """ writeGifToFile(fp, images, durations, loops, xys, disposes)
        Given a set of images writes the bytes to the specified stream.
        """
        # Obtain palette for all images and count each occurance
        palettes, occur = [], []
        for im in images:
            header, usedPaletteColors = getheader(im)
            palettes.append(header[-1])  # Last part of the header is the frame palette
        for palette in palettes:
            occur.append(palettes.count(palette))
        # Select most-used palette as the global one (or first in case no max)
        globalPalette = palettes[occur.index(max(occur))]
        # Init
        frames = 0
        firstFrame = True
        for im, palette in zip(images, palettes):
            if firstFrame:
                # Write header
                # Gather info
                header = self.getheaderAnim(im)
                appext = self.getAppExt(loops)
                # Write
                fp.write(header)
                fp.write(globalPalette)
                fp.write(appext)
                # Next frame is not the first
                firstFrame = False
            if True:
                # Write palette and image data
                # Gather info
                data = getdata(im)
                imdes, data = b''.join(data[:-2]), data[-2:]
                graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
                # Make image descriptor suitable for using 256 local color palette
                lid = self.getImageDescriptor(im, xys[frames])
                # Write local header: a frame needs its own color table when
                # its palette differs from the global one, or when it is not
                # disposed by restoring the background.
                if (palette != globalPalette) or (disposes[frames] != 2):
                    # Use local color palette
                    fp.write(graphext)
                    fp.write(lid)  # write suitable image descriptor
                    fp.write(palette)  # write local color table
                    fp.write('\x08')  # LZW minimum size code
                else:
                    # Use global color palette
                    fp.write(graphext)
                    fp.write(imdes)  # write suitable image descriptor
                for d in data:
                    fp.write(d)
            # Prepare for next round
            frames = frames + 1
        fp.write(";")  # end gif
        return frames
# Exposed functions
def readGif(filename, asNumpy=True):
    """ readGif(filename, asNumpy=True)
    Read images from an animated GIF file. Returns a list of numpy
    arrays, or, if asNumpy is false, a list if PIL images.
    """
    # Check PIL
    if PIL is None:
        raise RuntimeError("Need PIL to read animated gif files.")
    # Check Numpy
    if np is None:
        raise RuntimeError("Need Numpy to read animated gif files.")
    # Check whether it exists
    if not os.path.isfile(filename):
        raise IOError('File not found: ' + str(filename))
    # Load file using PIL
    pilIm = PIL.Image.open(filename)
    pilIm.seek(0)
    # Read all images inside
    images = []
    try:
        while True:
            # Get image as numpy array
            tmp = pilIm.convert()  # Make without palette
            a = np.asarray(tmp)
            if len(a.shape) == 0:
                raise MemoryError("Too little memory to convert PIL image to array")
            # Store, and next; seek() raises EOFError past the last frame.
            images.append(a)
            pilIm.seek(pilIm.tell() + 1)
    except EOFError:
        pass
    # Convert to normal PIL images if needed
    if not asNumpy:
        images2 = images
        images = []
        for im in images2:
            images.append(PIL.Image.fromarray(im))
    # Done
    return images
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | readGif | python | def readGif(filename, asNumpy=True):
# Check PIL
if PIL is None:
raise RuntimeError("Need PIL to read animated gif files.")
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to read animated gif files.")
# Check whether it exists
if not os.path.isfile(filename):
raise IOError('File not found: ' + str(filename))
# Load file using PIL
pilIm = PIL.Image.open(filename)
pilIm.seek(0)
# Read all images inside
images = []
try:
while True:
# Get image as numpy array
tmp = pilIm.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
# Store, and next
images.append(a)
pilIm.seek(pilIm.tell() + 1)
except EOFError:
pass
# Convert to normal PIL images if needed
if not asNumpy:
images2 = images
images = []
for im in images2:
images.append(PIL.Image.fromarray(im))
# Done
return images | readGif(filename, asNumpy=True)
Read images from an animated GIF file. Returns a list of numpy
arrays, or, if asNumpy is false, a list if PIL images. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L554-L601 | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012, Almar Klein, Ant1, Marius van Voorden
#
# This code is subject to the (new) BSD license:
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
""" Module images2gif
Provides functionality for reading and writing animated GIF images.
Use writeGif to write a series of numpy arrays or PIL images as an
animated GIF. Use readGif to read an animated gif as a series of numpy
arrays.
Note that since July 2004, all patents on the LZW compression patent have
expired. Therefore the GIF format may now be used freely.
Acknowledgements
----------------
Many thanks to Ant1 for:
* noting the use of "palette=PIL.Image.ADAPTIVE", which significantly
improves the results.
* the modifications to save each image with its own palette, or optionally
the global palette (if its the same).
Many thanks to Marius van Voorden for porting the NeuQuant quantization
algorithm of Anthony Dekker to Python (See the NeuQuant class for its
license).
Many thanks to Alex Robinson for implementing the concept of subrectangles,
which (depening on image content) can give a very significant reduction in
file size.
This code is based on gifmaker (in the scripts folder of the source
distribution of PIL)
Usefull links
-------------
* http://tronche.com/computer-graphics/gif/
* http://en.wikipedia.org/wiki/Graphics_Interchange_Format
* http://www.w3.org/Graphics/GIF/spec-gif89a.txt
"""
# todo: This module should be part of imageio (or at least based on)
from __future__ import absolute_import
import os
try:
import PIL
from PIL import Image
from PIL.GifImagePlugin import getheader, getdata
except ImportError:
PIL = None
try:
import numpy as np
except ImportError:
np = None
# getheader gives a 87a header and a color palette (two elements in a list).
# getdata()[0] gives the Image Descriptor up to (including) "LZW min code size".
# getdatas()[1:] is the image data itself in chuncks of 256 bytes (well
# technically the first byte says how many bytes follow, after which that
# amount (max 255) follows).
def checkImages(images):
    """ checkImages(images)
    Check numpy images and correct intensity range etc.
    The same for all movie formats.
    """
    # Init results
    images2 = []
    for im in images:
        if PIL and isinstance(im, PIL.Image.Image):
            # We assume PIL images are allright
            images2.append(im)
        elif np and isinstance(im, np.ndarray):
            # Check and convert dtype
            if im.dtype == np.uint8:
                images2.append(im)  # Ok
            elif im.dtype in [np.float32, np.float64]:
                # Floats are clipped to [0, 1] and scaled to the byte range.
                im = im.copy()
                im[im < 0] = 0
                im[im > 1] = 1
                im *= 255
                images2.append(im.astype(np.uint8))
            else:
                # Other dtypes are cast directly (values may wrap/truncate).
                im = im.astype(np.uint8)
                images2.append(im)
            # Check size: only 2D (gray) or 3D with 3/4 channels is valid.
            if im.ndim == 2:
                pass  # ok
            elif im.ndim == 3:
                if im.shape[2] not in [3, 4]:
                    raise ValueError('This array can not represent an image.')
            else:
                raise ValueError('This array can not represent an image.')
        else:
            raise ValueError('Invalid image type: ' + str(type(im)))
    # Done
    return images2
def intToBin(i):
    """ Integer to two bytes """
    # Low byte first (little endian), then the high byte.
    low = i % 256
    high = int(i / 256)
    return chr(low) + chr(high)
class GifWriter:
    """ GifWriter()
    Class that contains methods for helping write the animated GIF file.
    """

    def getheaderAnim(self, im):
        """ getheaderAnim(im)
        Get animation header. To replace PILs getheader()[0]
        """
        # GIF89a signature, then the logical screen size as two
        # little-endian 16-bit words, then packed fields / background
        # color index / pixel aspect ratio.
        bb = "GIF89a"
        bb += intToBin(im.size[0])
        bb += intToBin(im.size[1])
        bb += "\x87\x00\x00"
        return bb

    def getImageDescriptor(self, im, xy=None):
        """ getImageDescriptor(im, xy=None)
        Used for the local color table properties per image.
        Otherwise global color table applies to all frames irrespective of
        whether additional colors comes in play that require a redefined
        palette. Still a maximum of 256 color per frame, obviously.
        Written by Ant1 on 2010-08-22
        Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
        """
        # Default: use full image and place at upper left
        if xy is None:
            xy = (0, 0)
        # Image separator,
        bb = '\x2C'
        # Image position and size
        bb += intToBin(xy[0])  # Left position
        bb += intToBin(xy[1])  # Top position
        bb += intToBin(im.size[0])  # image width
        bb += intToBin(im.size[1])  # image height
        # packed field: local color table flag1, interlace0, sorted table0,
        # reserved00, lct size111=7=2^(7+1)=256.
        bb += '\x87'
        # LZW minimum size code now comes later, begining of [image data] blocks
        return bb

    def getAppExt(self, loops=float('inf')):
        """ getAppExt(loops=float('inf'))
        Application extention. This part specifies the amount of loops.
        If loops is 0 or inf, it goes on infinitely.
        """
        # "Infinite" is encoded here as the maximum 16-bit loop count.
        # NOTE(review): the NETSCAPE extension usually interprets a zero
        # loop count as infinite; this writes 65535 instead -- confirm.
        if loops == 0 or loops == float('inf'):
            loops = 2 ** 16 - 1
        # bb = "" application extension should not be used
        # (the extension interprets zero loops
        # to mean an infinite number of loops)
        # Mmm, does not seem to work
        if True:
            bb = "\x21\xFF\x0B"  # application extension
            bb += "NETSCAPE2.0"
            bb += "\x03\x01"
            bb += intToBin(loops)
            bb += '\x00'  # end
        return bb

    def getGraphicsControlExt(self, duration=0.1, dispose=2):
        """ getGraphicsControlExt(duration=0.1, dispose=2)
        Graphics Control Extension. A sort of header at the start of
        each image. Specifies duration and transparancy.
        Dispose
        -------
        * 0 - No disposal specified.
        * 1 - Do not dispose. The graphic is to be left in place.
        * 2 - Restore to background color. The area used by the graphic
          must be restored to the background color.
        * 3 - Restore to previous. The decoder is required to restore the
          area overwritten by the graphic with what was there prior to
          rendering the graphic.
        * 4-7 -To be defined.
        """
        bb = '\x21\xF9\x04'
        bb += chr((dispose & 3) << 2)  # low bit 1 == transparency,
        # 2nd bit 1 == user input , next 3 bits, the low two of which are used,
        # are dispose.
        bb += intToBin(int(duration * 100))  # in 100th of seconds
        bb += '\x00'  # no transparant color
        bb += '\x00'  # end
        return bb

    def handleSubRectangles(self, images, subRectangles):
        """ handleSubRectangles(images)
        Handle the sub-rectangle stuff. If the rectangles are given by the
        user, the values are checked. Otherwise the subrectangles are
        calculated automatically.
        """
        if isinstance(subRectangles, (tuple, list)):
            # xy given directly
            # Check xy
            xy = subRectangles
            if xy is None:
                xy = (0, 0)
            if hasattr(xy, '__len__'):
                if len(xy) == len(images):
                    xy = [xxyy for xxyy in xy]
                else:
                    raise ValueError("len(xy) doesn't match amount of images.")
            else:
                xy = [xy for im in images]
            # The first frame always covers the full screen.
            xy[0] = (0, 0)
        else:
            # Calculate xy using some basic image processing
            # Check Numpy
            if np is None:
                raise RuntimeError("Need Numpy to use auto-subRectangles.")
            # First make numpy arrays if required
            for i in range(len(images)):
                im = images[i]
                if isinstance(im, Image.Image):
                    tmp = im.convert()  # Make without palette
                    a = np.asarray(tmp)
                    if len(a.shape) == 0:
                        raise MemoryError("Too little memory to convert PIL image to array")
                    images[i] = a
            # Determine the sub rectangles
            images, xy = self.getSubRectangles(images)
        # Done
        return images, xy

    def getSubRectangles(self, ims):
        """ getSubRectangles(ims)
        Calculate the minimal rectangles that need updating each frame.
        Returns a two-element tuple containing the cropped images and a
        list of x-y positions.
        Calculating the subrectangles takes extra time, obviously. However,
        if the image sizes were reduced, the actual writing of the GIF
        goes faster. In some cases applying this method produces a GIF faster.
        """
        # Check image count
        if len(ims) < 2:
            return ims, [(0, 0) for i in ims]
        # We need numpy
        if np is None:
            raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
        # Prepare
        ims2 = [ims[0]]
        xy = [(0, 0)]
        # t0 = time.time()
        # Iterate over images
        prev = ims[0]
        for im in ims[1:]:
            # Get difference, sum over colors
            diff = np.abs(im - prev)
            if diff.ndim == 3:
                diff = diff.sum(2)
            # Get begin and end for both dimensions
            X = np.argwhere(diff.sum(0))
            Y = np.argwhere(diff.sum(1))
            # Get rect coordinates
            if X.size and Y.size:
                x0, x1 = X[0], X[-1] + 1
                y0, y1 = Y[0], Y[-1] + 1
            else:  # No change ... make it minimal
                x0, x1 = 0, 2
                y0, y1 = 0, 2
            # Cut out and store
            im2 = im[y0:y1, x0:x1]
            prev = im
            ims2.append(im2)
            xy.append((x0, y0))
        # Done
        # print('%1.2f seconds to determine subrectangles of %i images' %
        #     (time.time()-t0, len(ims2)) )
        return ims2, xy

    def convertImagesToPIL(self, images, dither, nq=0):
        """ convertImagesToPIL(images, nq=0)
        Convert images to Paletted PIL images, which can then be
        written to a single animaged GIF.
        """
        # Convert to PIL images
        images2 = []
        for im in images:
            if isinstance(im, Image.Image):
                images2.append(im)
            elif np and isinstance(im, np.ndarray):
                if im.ndim == 3 and im.shape[2] == 3:
                    im = Image.fromarray(im, 'RGB')
                elif im.ndim == 3 and im.shape[2] == 4:
                    # Alpha channel is dropped here.
                    im = Image.fromarray(im[:, :, :3], 'RGB')
                elif im.ndim == 2:
                    im = Image.fromarray(im, 'L')
                images2.append(im)
        # Convert to paletted PIL images
        images, images2 = images2, []
        # Adaptive PIL algorithm
        # NOTE(review): the nq parameter is never used in this body --
        # the NeuQuant quantization path appears to be absent.
        AD = Image.ADAPTIVE
        for im in images:
            im = im.convert('P', palette=AD, dither=dither)
            images2.append(im)
        # Done
        return images2

    def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
        """ writeGifToFile(fp, images, durations, loops, xys, disposes)
        Given a set of images writes the bytes to the specified stream.
        """
        # Obtain palette for all images and count each occurance
        palettes, occur = [], []
        for im in images:
            header, usedPaletteColors = getheader(im)
            palettes.append(header[-1])  # Last part of the header is the frame palette
        for palette in palettes:
            occur.append(palettes.count(palette))
        # Select most-used palette as the global one (or first in case no max)
        globalPalette = palettes[occur.index(max(occur))]
        # Init
        frames = 0
        firstFrame = True
        for im, palette in zip(images, palettes):
            if firstFrame:
                # Write header
                # Gather info
                header = self.getheaderAnim(im)
                appext = self.getAppExt(loops)
                # Write
                fp.write(header)
                fp.write(globalPalette)
                fp.write(appext)
                # Next frame is not the first
                firstFrame = False
            if True:
                # Write palette and image data
                # Gather info
                data = getdata(im)
                imdes, data = b''.join(data[:-2]), data[-2:]
                graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
                # Make image descriptor suitable for using 256 local color palette
                lid = self.getImageDescriptor(im, xys[frames])
                # Write local header: a frame needs its own color table when
                # its palette differs from the global one, or when it is not
                # disposed by restoring the background.
                if (palette != globalPalette) or (disposes[frames] != 2):
                    # Use local color palette
                    fp.write(graphext)
                    fp.write(lid)  # write suitable image descriptor
                    fp.write(palette)  # write local color table
                    fp.write('\x08')  # LZW minimum size code
                else:
                    # Use global color palette
                    fp.write(graphext)
                    fp.write(imdes)  # write suitable image descriptor
                for d in data:
                    fp.write(d)
            # Prepare for next round
            frames = frames + 1
        fp.write(";")  # end gif
        return frames
# Exposed functions
def writeGif(
filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None):
""" writeGif(filename, images, duration=0.1, repeat=True, dither=False,
nq=0, subRectangles=True, dispose=None)
Write an animated gif from the specified images.
Parameters
----------
filename : string
The name of the file to write the image to.
images : list
Should be a list consisting of PIL images or numpy arrays.
The latter should be between 0 and 255 for integer types, and
between 0 and 1 for float types.
duration : scalar or list of scalars
The duration for all frames, or (if a list) for each frame.
repeat : bool or integer
The amount of loops. If True, loops infinitetely.
dither : bool
Whether to apply dithering
nq : integer
If nonzero, applies the NeuQuant quantization algorithm to create
the color palette. This algorithm is superior, but slower than
the standard PIL algorithm. The value of nq is the quality
parameter. 1 represents the best quality. 10 is in general a
good tradeoff between quality and speed. When using this option,
better results are usually obtained when subRectangles is False.
subRectangles : False, True, or a list of 2-element tuples
Whether to use sub-rectangles. If True, the minimal rectangle that
is required to update each frame is automatically detected. This
can give significant reductions in file size, particularly if only
a part of the image changes. One can also give a list of x-y
coordinates if you want to do the cropping yourself. The default
is True.
dispose : int
How to dispose each frame. 1 means that each frame is to be left
in place. 2 means the background color should be restored after
each frame. 3 means the decoder should restore the previous frame.
If subRectangles==False, the default is 2, otherwise it is 1.
"""
# Check PIL
if PIL is None:
raise RuntimeError("Need PIL to write animated gif files.")
# Check images
images = checkImages(images)
# Instantiate writer object
gifWriter = GifWriter()
# Check loops
if repeat is False:
loops = 1
elif repeat is True:
loops = 0 # zero means infinite
else:
loops = int(repeat)
# Check duration
if hasattr(duration, '__len__'):
if len(duration) == len(images):
duration = [d for d in duration]
else:
raise ValueError("len(duration) doesn't match amount of images.")
else:
duration = [duration for im in images]
# Check subrectangles
if subRectangles:
images, xy = gifWriter.handleSubRectangles(images, subRectangles)
defaultDispose = 1 # Leave image in place
else:
# Normal mode
xy = [(0, 0) for im in images]
defaultDispose = 2 # Restore to background color.
# Check dispose
if dispose is None:
dispose = defaultDispose
if hasattr(dispose, '__len__'):
if len(dispose) != len(images):
raise ValueError("len(xy) doesn't match amount of images.")
else:
dispose = [dispose for im in images]
# Make images in a format that we can write easy
images = gifWriter.convertImagesToPIL(images, dither, nq)
# Write
fp = open(filename, 'wb')
try:
gifWriter.writeGifToFile(fp, images, duration, loops, xy, dispose)
finally:
fp.close()
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.getheaderAnim | python | def getheaderAnim(self, im):
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb | getheaderAnim(im)
Get animation header. To replace PILs getheader()[0] | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L148-L158 | [
"def intToBin(i):\n \"\"\" Integer to two bytes \"\"\"\n # devide in two parts (bytes)\n i1 = i % 256\n i2 = int(i / 256)\n # make string (little endian)\n return chr(i1) + chr(i2)\n"
] | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.getImageDescriptor | python | def getImageDescriptor(self, im, xy=None):
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb | getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L160-L192 | [
"def intToBin(i):\n \"\"\" Integer to two bytes \"\"\"\n # devide in two parts (bytes)\n i1 = i % 256\n i2 = int(i / 256)\n # make string (little endian)\n return chr(i1) + chr(i2)\n"
] | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.getAppExt | python | def getAppExt(self, loops=float('inf')):
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb | getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L194-L214 | [
"def intToBin(i):\n \"\"\" Integer to two bytes \"\"\"\n # devide in two parts (bytes)\n i1 = i % 256\n i2 = int(i / 256)\n # make string (little endian)\n return chr(i1) + chr(i2)\n"
] | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.getGraphicsControlExt | python | def getGraphicsControlExt(self, duration=0.1, dispose=2):
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb | getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L216-L242 | [
"def intToBin(i):\n \"\"\" Integer to two bytes \"\"\"\n # devide in two parts (bytes)\n i1 = i % 256\n i2 = int(i / 256)\n # make string (little endian)\n return chr(i1) + chr(i2)\n"
] | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.handleSubRectangles | python | def handleSubRectangles(self, images, subRectangles):
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy | handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L244-L290 | [
"def getSubRectangles(self, ims):\n \"\"\" getSubRectangles(ims)\n\n Calculate the minimal rectangles that need updating each frame.\n Returns a two-element tuple containing the cropped images and a\n list of x-y positions.\n\n Calculating the subrectangles takes extra time, obviously. However,\n ... | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.getSubRectangles | python | def getSubRectangles(self, ims):
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy | getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L292-L346 | null | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.convertImagesToPIL | python | def convertImagesToPIL(self, images, dither, nq=0):
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2 | convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L348-L380 | null | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
""" writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream.
"""
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames
|
thumbor/thumbor | thumbor/engines/extensions/pil.py | GifWriter.writeGifToFile | python | def writeGifToFile(self, fp, images, durations, loops, xys, disposes):
# Obtain palette for all images and count each occurance
palettes, occur = [], []
for im in images:
header, usedPaletteColors = getheader(im)
palettes.append(header[-1]) # Last part of the header is the frame palette
for palette in palettes:
occur.append(palettes.count(palette))
# Select most-used palette as the global one (or first in case no max)
globalPalette = palettes[occur.index(max(occur))]
# Init
frames = 0
firstFrame = True
for im, palette in zip(images, palettes):
if firstFrame:
# Write header
# Gather info
header = self.getheaderAnim(im)
appext = self.getAppExt(loops)
# Write
fp.write(header)
fp.write(globalPalette)
fp.write(appext)
# Next frame is not the first
firstFrame = False
if True:
# Write palette and image data
# Gather info
data = getdata(im)
imdes, data = b''.join(data[:-2]), data[-2:]
graphext = self.getGraphicsControlExt(durations[frames], disposes[frames])
# Make image descriptor suitable for using 256 local color palette
lid = self.getImageDescriptor(im, xys[frames])
# Write local header
if (palette != globalPalette) or (disposes[frames] != 2):
# Use local color palette
fp.write(graphext)
fp.write(lid) # write suitable image descriptor
fp.write(palette) # write local color table
fp.write('\x08') # LZW minimum size code
else:
# Use global color palette
fp.write(graphext)
fp.write(imdes) # write suitable image descriptor
for d in data:
fp.write(d)
# Prepare for next round
frames = frames + 1
fp.write(";") # end gif
return frames | writeGifToFile(fp, images, durations, loops, xys, disposes)
Given a set of images writes the bytes to the specified stream. | train | https://github.com/thumbor/thumbor/blob/558ccdd6e3bc29e1c9ee3687372c4b3eb05ac607/thumbor/engines/extensions/pil.py#L382-L450 | [
"def getheaderAnim(self, im):\n \"\"\" getheaderAnim(im)\n\n Get animation header. To replace PILs getheader()[0]\n\n \"\"\"\n bb = \"GIF89a\"\n bb += intToBin(im.size[0])\n bb += intToBin(im.size[1])\n bb += \"\\x87\\x00\\x00\"\n return bb\n",
"def getImageDescriptor(self, im, xy=None):\n... | class GifWriter:
""" GifWriter()
Class that contains methods for helping write the animated GIF file.
"""
def getheaderAnim(self, im):
""" getheaderAnim(im)
Get animation header. To replace PILs getheader()[0]
"""
bb = "GIF89a"
bb += intToBin(im.size[0])
bb += intToBin(im.size[1])
bb += "\x87\x00\x00"
return bb
def getImageDescriptor(self, im, xy=None):
""" getImageDescriptor(im, xy=None)
Used for the local color table properties per image.
Otherwise global color table applies to all frames irrespective of
whether additional colors comes in play that require a redefined
palette. Still a maximum of 256 color per frame, obviously.
Written by Ant1 on 2010-08-22
Modified by Alex Robinson in Janurari 2011 to implement subrectangles.
"""
# Defaule use full image and place at upper left
if xy is None:
xy = (0, 0)
# Image separator,
bb = '\x2C'
# Image position and size
bb += intToBin(xy[0]) # Left position
bb += intToBin(xy[1]) # Top position
bb += intToBin(im.size[0]) # image width
bb += intToBin(im.size[1]) # image height
# packed field: local color table flag1, interlace0, sorted table0,
# reserved00, lct size111=7=2^(7+1)=256.
bb += '\x87'
# LZW minimum size code now comes later, begining of [image data] blocks
return bb
def getAppExt(self, loops=float('inf')):
""" getAppExt(loops=float('inf'))
Application extention. This part specifies the amount of loops.
If loops is 0 or inf, it goes on infinitely.
"""
if loops == 0 or loops == float('inf'):
loops = 2 ** 16 - 1
# bb = "" application extension should not be used
# (the extension interprets zero loops
# to mean an infinite number of loops)
# Mmm, does not seem to work
if True:
bb = "\x21\xFF\x0B" # application extension
bb += "NETSCAPE2.0"
bb += "\x03\x01"
bb += intToBin(loops)
bb += '\x00' # end
return bb
def getGraphicsControlExt(self, duration=0.1, dispose=2):
""" getGraphicsControlExt(duration=0.1, dispose=2)
Graphics Control Extension. A sort of header at the start of
each image. Specifies duration and transparancy.
Dispose
-------
* 0 - No disposal specified.
* 1 - Do not dispose. The graphic is to be left in place.
* 2 - Restore to background color. The area used by the graphic
must be restored to the background color.
* 3 - Restore to previous. The decoder is required to restore the
area overwritten by the graphic with what was there prior to
rendering the graphic.
* 4-7 -To be defined.
"""
bb = '\x21\xF9\x04'
bb += chr((dispose & 3) << 2) # low bit 1 == transparency,
# 2nd bit 1 == user input , next 3 bits, the low two of which are used,
# are dispose.
bb += intToBin(int(duration * 100)) # in 100th of seconds
bb += '\x00' # no transparant color
bb += '\x00' # end
return bb
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectangles, (tuple, list)):
# xy given directly
# Check xy
xy = subRectangles
if xy is None:
xy = (0, 0)
if hasattr(xy, '__len__'):
if len(xy) == len(images):
xy = [xxyy for xxyy in xy]
else:
raise ValueError("len(xy) doesn't match amount of images.")
else:
xy = [xy for im in images]
xy[0] = (0, 0)
else:
# Calculate xy using some basic image processing
# Check Numpy
if np is None:
raise RuntimeError("Need Numpy to use auto-subRectangles.")
# First make numpy arrays if required
for i in range(len(images)):
im = images[i]
if isinstance(im, Image.Image):
tmp = im.convert() # Make without palette
a = np.asarray(tmp)
if len(a.shape) == 0:
raise MemoryError("Too little memory to convert PIL image to array")
images[i] = a
# Determine the sub rectangles
images, xy = self.getSubRectangles(images)
# Done
return images, xy
def getSubRectangles(self, ims):
""" getSubRectangles(ims)
Calculate the minimal rectangles that need updating each frame.
Returns a two-element tuple containing the cropped images and a
list of x-y positions.
Calculating the subrectangles takes extra time, obviously. However,
if the image sizes were reduced, the actual writing of the GIF
goes faster. In some cases applying this method produces a GIF faster.
"""
# Check image count
if len(ims) < 2:
return ims, [(0, 0) for i in ims]
# We need numpy
if np is None:
raise RuntimeError("Need Numpy to calculate sub-rectangles. ")
# Prepare
ims2 = [ims[0]]
xy = [(0, 0)]
# t0 = time.time()
# Iterate over images
prev = ims[0]
for im in ims[1:]:
# Get difference, sum over colors
diff = np.abs(im - prev)
if diff.ndim == 3:
diff = diff.sum(2)
# Get begin and end for both dimensions
X = np.argwhere(diff.sum(0))
Y = np.argwhere(diff.sum(1))
# Get rect coordinates
if X.size and Y.size:
x0, x1 = X[0], X[-1] + 1
y0, y1 = Y[0], Y[-1] + 1
else: # No change ... make it minimal
x0, x1 = 0, 2
y0, y1 = 0, 2
# Cut out and store
im2 = im[y0:y1, x0:x1]
prev = im
ims2.append(im2)
xy.append((x0, y0))
# Done
# print('%1.2f seconds to determine subrectangles of %i images' %
# (time.time()-t0, len(ims2)) )
return ims2, xy
def convertImagesToPIL(self, images, dither, nq=0):
""" convertImagesToPIL(images, nq=0)
Convert images to Paletted PIL images, which can then be
written to a single animaged GIF.
"""
# Convert to PIL images
images2 = []
for im in images:
if isinstance(im, Image.Image):
images2.append(im)
elif np and isinstance(im, np.ndarray):
if im.ndim == 3 and im.shape[2] == 3:
im = Image.fromarray(im, 'RGB')
elif im.ndim == 3 and im.shape[2] == 4:
im = Image.fromarray(im[:, :, :3], 'RGB')
elif im.ndim == 2:
im = Image.fromarray(im, 'L')
images2.append(im)
# Convert to paletted PIL images
images, images2 = images2, []
# Adaptive PIL algorithm
AD = Image.ADAPTIVE
for im in images:
im = im.convert('P', palette=AD, dither=dither)
images2.append(im)
# Done
return images2
|
jldbc/pybaseball | pybaseball/team_results.py | process_win_streak | python | def process_win_streak(data):
#only do this if there are non-NANs in the column
if data['Streak'].count()>0:
data['Streak2'] = data['Streak'].str.len()
data.loc[data['Streak'].str[0]=='-','Streak2'] = -data['Streak2']
data['Streak'] = data['Streak2']
data = data.drop('Streak2',1)
return data | Convert "+++"/"---" formatted win/loss streak column into a +/- integer column | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/team_results.py#L71-L81 | null | import numpy as np
import pandas as pd
import requests
from bs4 import BeautifulSoup
from datetime import datetime
from pybaseball.lahman import teams
from pybaseball.utils import first_season_map
# TODO: retrieve data for all teams? a full season's worth of results
def get_soup(season, team):
# get most recent year's schedule if year not specified
if(season is None):
season = datetime.datetime.today().strftime("%Y")
url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
s=requests.get(url).content
return BeautifulSoup(s, "lxml")
def get_table(soup,team):
try:
table = soup.find_all('table')[0]
except:
raise ValueError("Data cannot be retrieved for this team/year combo. Please verify that your team abbreviation is accurate and that the team existed during the season you are searching for.")
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")]
headings = headings[1:] # the "gm#" heading doesn't have a <td> element
headings[3] = "Home_Away"
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row_index in range(len(rows)-1): #last row is a description of column meanings
row = rows[row_index]
try:
cols = row.find_all('td')
#links = row.find_all('a')
if cols[1].text == "":
cols[1].string = team # some of the older season don't seem to have team abbreviation on their tables
if cols[3].text == "":
cols[3].string = 'Home' # this element only has an entry if it's an away game
if cols[12].text == "":
cols[12].string = "None" # tie games won't have a pitcher win or loss
if cols[13].text == "":
cols[13].string = "None"
if cols[14].text == "":
cols[14].string = "None" # games w/o saves have blank td entry
if cols[8].text == "":
cols[8].string = "9" # entry is blank if no extra innings
if cols[16].text=="":
cols[16].string = "Unknown"
if cols[15].text=="":
cols[15].string = "Unknown"
if cols[17].text == "":
cols[17].string = "Unknown" # Unknown if attendance data is missing
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols if ele])
except:
# two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
# if future games, grab the scheduling info. Otherwise do nothing.
if len(cols)>1:
cols = [ele.text.strip() for ele in cols][0:5]
data.append([ele for ele in cols if ele])
#convert to pandas dataframe. make first row the table's column names and reindex.
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
data = data.drop('',1) #not a useful column
data['Attendance'].replace(r'^Unknown$', np.nan, regex=True, inplace = True) # make this a NaN so the column can benumeric
return data
def make_numeric(data):
# first remove commas from attendance values
# skip if column is all NA (not sure if everyone kept records in the early days)
if data['Attendance'].count()>0:
data['Attendance'] = data['Attendance'].str.replace(',','')
#data[num_cols] = data[num_cols].astype(float)
else:
data['Attendance'] = np.nan
# now make everything numeric
num_cols = ["R","RA","Inn","Rank","Attendance"]#,'Streak']
data[num_cols] = data[num_cols].astype(float) #not int because of NaNs
return data
def schedule_and_record(season=None, team=None):
# retrieve html from baseball reference
# sanatize input
team = team.upper()
try:
if season < first_season_map[team]:
m = "Season cannot be before first year of a team's existence"
raise ValueError(m)
# ignore validation if team isn't found in dictionary
except KeyError:
pass
if season > datetime.now().year:
raise ValueError('Season cannot be after current year')
soup = get_soup(season, team)
table = get_table(soup, team)
table = process_win_streak(table)
table = make_numeric(table)
return table
|
jldbc/pybaseball | pybaseball/utils.py | split_request | python | def split_request(start_dt, end_dt, player_id, url):
current_dt = datetime.datetime.strptime(start_dt, '%Y-%m-%d')
end_dt = datetime.datetime.strptime(end_dt, '%Y-%m-%d')
results = [] # list to hold data as it is returned
player_id = str(player_id)
print('Gathering Player Data')
# break query into multiple requests
while current_dt < end_dt:
remaining = end_dt - current_dt
# increment date ranges by at most 60 days
delta = min(remaining, datetime.timedelta(days=2190))
next_dt = current_dt + delta
start_str = current_dt.strftime('%Y-%m-%d')
end_str = next_dt.strftime('%Y-%m-%d')
# retrieve data
data = requests.get(url.format(start_str, end_str, player_id))
df = pd.read_csv(io.StringIO(data.text))
# add data to list and increment current dates
results.append(df)
current_dt = next_dt + datetime.timedelta(days=1)
return pd.concat(results) | Splits Statcast queries to avoid request timeouts | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/utils.py#L69-L92 | null | import pandas as pd
import requests
import datetime
import io
import zipfile
# dictionary containing team abbreviations and their first year in existance
first_season_map = {'ALT': 1884, 'ANA': 1997, 'ARI': 1998, 'ATH': 1871,
'ATL': 1966, 'BAL': 1872, 'BLA': 1901, 'BLN': 1892,
'BLU': 1884, 'BOS': 1871, 'BRA': 1872, 'BRG': 1890,
'BRO': 1884, 'BSN': 1876, 'BTT': 1914, 'BUF': 1879,
'BWW': 1890, 'CAL': 1965, 'CEN': 1875, 'CHC': 1876,
'CHI': 1871, 'CHW': 1901, 'CIN': 1876, 'CKK': 1891,
'CLE': 1871, 'CLV': 1879, 'COL': 1883, 'COR': 1884,
'CPI': 1884, 'DET': 1901, 'DTN': 1881, 'ECK': 1872,
'FLA': 1993, 'HAR': 1874, 'HOU': 1962, 'IND': 1878,
'KCA': 1955, 'KCC': 1884, 'KCN': 1886, 'KCP': 1914,
'KCR': 1969, 'KEK': 1871, 'LAA': 1961, 'LAD': 1958,
'LOU': 1876, 'MAN': 1872, 'MAR': 1873, 'MIA': 2012,
'MIL': 1884, 'MIN': 1961, 'MLA': 1901, 'MLG': 1878,
'MLN': 1953, 'MON': 1969, 'NAT': 1872, 'NEW': 1915,
'NHV': 1875, 'NYG': 1883, 'NYI': 1890, 'NYM': 1962,
'NYP': 1883, 'NYU': 1871, 'NYY': 1903, 'OAK': 1968,
'OLY': 1871, 'PBB': 1890, 'PBS': 1914, 'PHA': 1882,
'PHI': 1873, 'PHK': 1884, 'PHQ': 1890, 'PIT': 1882,
'PRO': 1878, 'RES': 1873, 'RIC': 1884, 'ROC': 1890,
'ROK': 1871, 'SDP': 1969, 'SEA': 1977, 'SEP': 1969,
'SFG': 1958, 'SLB': 1902, 'SLM': 1884, 'SLR': 1875,
'STL': 1875, 'STP': 1884, 'SYR': 1879, 'TBD': 1998,
'TBR': 2008, 'TEX': 1972, 'TOL': 1884, 'TOR': 1977,
'TRO': 1871, 'WAS': 1873, 'WES': 1875, 'WHS': 1884,
'WIL': 1884, 'WOR': 1880, 'WSA': 1961, 'WSH': 1901,
'WSN': 2005}
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt, player_id):
# error if no player ID provided
if player_id is None:
raise ValueError("Player ID is required. If you need to find a player's id, try pybaseball.playerid_lookup(last_name, first_name) and use their key_mlbam. If you want statcast data for all players, try the statcast() function.")
# this id should be a string to place inside a url
player_id = str(player_id)
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's Statcast data. For a different date range, try get_statcast(start_dt, end_dt).")
# if only one date is supplied, assume they only want that day's stats
# query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt, player_id
def get_zip_file(url):
"""
Get zip file from provided URL
"""
with requests.get(url, stream=True) as f:
z = zipfile.ZipFile(io.BytesIO(f.content))
return z
|
jldbc/pybaseball | pybaseball/utils.py | get_zip_file | python | def get_zip_file(url):
with requests.get(url, stream=True) as f:
z = zipfile.ZipFile(io.BytesIO(f.content))
return z | Get zip file from provided URL | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/utils.py#L95-L101 | null | import pandas as pd
import requests
import datetime
import io
import zipfile
# dictionary containing team abbreviations and their first year in existance
first_season_map = {'ALT': 1884, 'ANA': 1997, 'ARI': 1998, 'ATH': 1871,
'ATL': 1966, 'BAL': 1872, 'BLA': 1901, 'BLN': 1892,
'BLU': 1884, 'BOS': 1871, 'BRA': 1872, 'BRG': 1890,
'BRO': 1884, 'BSN': 1876, 'BTT': 1914, 'BUF': 1879,
'BWW': 1890, 'CAL': 1965, 'CEN': 1875, 'CHC': 1876,
'CHI': 1871, 'CHW': 1901, 'CIN': 1876, 'CKK': 1891,
'CLE': 1871, 'CLV': 1879, 'COL': 1883, 'COR': 1884,
'CPI': 1884, 'DET': 1901, 'DTN': 1881, 'ECK': 1872,
'FLA': 1993, 'HAR': 1874, 'HOU': 1962, 'IND': 1878,
'KCA': 1955, 'KCC': 1884, 'KCN': 1886, 'KCP': 1914,
'KCR': 1969, 'KEK': 1871, 'LAA': 1961, 'LAD': 1958,
'LOU': 1876, 'MAN': 1872, 'MAR': 1873, 'MIA': 2012,
'MIL': 1884, 'MIN': 1961, 'MLA': 1901, 'MLG': 1878,
'MLN': 1953, 'MON': 1969, 'NAT': 1872, 'NEW': 1915,
'NHV': 1875, 'NYG': 1883, 'NYI': 1890, 'NYM': 1962,
'NYP': 1883, 'NYU': 1871, 'NYY': 1903, 'OAK': 1968,
'OLY': 1871, 'PBB': 1890, 'PBS': 1914, 'PHA': 1882,
'PHI': 1873, 'PHK': 1884, 'PHQ': 1890, 'PIT': 1882,
'PRO': 1878, 'RES': 1873, 'RIC': 1884, 'ROC': 1890,
'ROK': 1871, 'SDP': 1969, 'SEA': 1977, 'SEP': 1969,
'SFG': 1958, 'SLB': 1902, 'SLM': 1884, 'SLR': 1875,
'STL': 1875, 'STP': 1884, 'SYR': 1879, 'TBD': 1998,
'TBR': 2008, 'TEX': 1972, 'TOL': 1884, 'TOR': 1977,
'TRO': 1871, 'WAS': 1873, 'WES': 1875, 'WHS': 1884,
'WIL': 1884, 'WOR': 1880, 'WSA': 1961, 'WSH': 1901,
'WSN': 2005}
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt, player_id):
# error if no player ID provided
if player_id is None:
raise ValueError("Player ID is required. If you need to find a player's id, try pybaseball.playerid_lookup(last_name, first_name) and use their key_mlbam. If you want statcast data for all players, try the statcast() function.")
# this id should be a string to place inside a url
player_id = str(player_id)
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's Statcast data. For a different date range, try get_statcast(start_dt, end_dt).")
# if only one date is supplied, assume they only want that day's stats
# query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt, player_id
def split_request(start_dt, end_dt, player_id, url):
"""
Splits Statcast queries to avoid request timeouts
"""
current_dt = datetime.datetime.strptime(start_dt, '%Y-%m-%d')
end_dt = datetime.datetime.strptime(end_dt, '%Y-%m-%d')
results = [] # list to hold data as it is returned
player_id = str(player_id)
print('Gathering Player Data')
# break query into multiple requests
while current_dt < end_dt:
remaining = end_dt - current_dt
# increment date ranges by at most 60 days
delta = min(remaining, datetime.timedelta(days=2190))
next_dt = current_dt + delta
start_str = current_dt.strftime('%Y-%m-%d')
end_str = next_dt.strftime('%Y-%m-%d')
# retrieve data
data = requests.get(url.format(start_str, end_str, player_id))
df = pd.read_csv(io.StringIO(data.text))
# add data to list and increment current dates
results.append(df)
current_dt = next_dt + datetime.timedelta(days=1)
return pd.concat(results)
|
jldbc/pybaseball | pybaseball/statcast_pitcher.py | statcast_pitcher | python | def statcast_pitcher(start_dt=None, end_dt=None, player_id=None):
start_dt, end_dt, player_id = sanitize_input(start_dt, end_dt, player_id)
# inputs are valid if either both or zero dates are supplied. Not valid of only one given.
if start_dt and end_dt:
url = 'https://baseballsavant.mlb.com/statcast_search/csv?all=true&hfPT=&hfAB=&hfBBT=&hfPR=&hfZ=&stadium=&hfBBL=&hfNewZones=&hfGT=R%7CPO%7CS%7C=&hfSea=&hfSit=&player_type=pitcher&hfOuts=&opponent=&pitcher_throws=&batter_stands=&hfSA=&game_date_gt={}&game_date_lt={}&pitchers_lookup%5B%5D={}&team=&position=&hfRO=&home_road=&hfFlag=&metric_1=&hfInn=&min_pitches=0&min_results=0&group_by=name&sort_col=pitches&player_event_sort=h_launch_speed&sort_order=desc&min_abs=0&type=details&'
df = split_request(start_dt, end_dt, player_id, url)
return df | Pulls statcast pitch-level data from Baseball Savant for a given pitcher.
ARGUMENTS
start_dt : YYYY-MM-DD : the first date for which you want a player's statcast data
end_dt : YYYY-MM-DD : the final date for which you want data
player_id : INT : the player's MLBAM ID. Find this by calling pybaseball.playerid_lookup(last_name, first_name), finding the correct player, and selecting their key_mlbam. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/statcast_pitcher.py#L4-L19 | [
"def sanitize_input(start_dt, end_dt, player_id):\n # error if no player ID provided\n if player_id is None:\n raise ValueError(\"Player ID is required. If you need to find a player's id, try pybaseball.playerid_lookup(last_name, first_name) and use their key_mlbam. If you want statcast data for all pl... | from pybaseball.utils import sanitize_input, split_request
|
jldbc/pybaseball | pybaseball/league_batting_stats.py | batting_stats_range | python | def batting_stats_range(start_dt=None, end_dt=None):
# make sure date inputs are valid
start_dt, end_dt = sanitize_input(start_dt, end_dt)
if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
# retrieve html from baseball reference
soup = get_soup(start_dt, end_dt)
table = get_table(soup)
table = table.dropna(how='all') # drop if all columns are NA
# scraped data is initially in string format.
# convert the necessary columns to numeric.
for column in ['Age', '#days', 'G', 'PA', 'AB', 'R', 'H', '2B', '3B',
'HR', 'RBI', 'BB', 'IBB', 'SO', 'HBP', 'SH', 'SF', 'GDP',
'SB', 'CS', 'BA', 'OBP', 'SLG', 'OPS']:
#table[column] = table[column].astype('float')
table[column] = pd.to_numeric(table[column])
#table['column'] = table['column'].convert_objects(convert_numeric=True)
table = table.drop('', 1)
return table | Get all batting stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/league_batting_stats.py#L67-L92 | [
"def sanitize_input(start_dt, end_dt):\n # if no dates are supplied, assume they want yesterday's data\n # send a warning in case they wanted to specify\n if start_dt is None and end_dt is None:\n today = datetime.datetime.today()\n start_dt = (today - datetime.timedelta(1)).strftime(\"%Y-%m-... | import requests
import pandas as pd
import datetime
import io
from bs4 import BeautifulSoup
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's data. For a different date range, try batting_stats_range(start_dt, end_dt) or batting_stats(season).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
#if end date occurs before start date, swap them
if end_dt < start_dt:
temp = start_dt
start_dt = end_dt
end_dt = temp
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def get_soup(start_dt, end_dt):
# get most recent standings if date not specified
# if((start_dt is None) or (end_dt is None)):
# print('Error: a date range needs to be specified')
# return None
url = "http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=b&lastndays=7&dates=fromandto&fromandto={}.{}&level=mlb&franch=&stat=&stat_value=0".format(start_dt, end_dt)
s = requests.get(url).content
return BeautifulSoup(s, "lxml")
def get_table(soup):
table = soup.find_all('table')[0]
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")][1:]
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def batting_stats_bref(season=None):
"""
Get all batting stats for a set season. If no argument is supplied, gives
stats for current season to date.
"""
if season is None:
season = datetime.datetime.today().strftime("%Y")
season = str(season)
start_dt = season + '-03-01' #opening day is always late march or early april
end_dt = season + '-11-01' #season is definitely over by November
return(batting_stats_range(start_dt, end_dt))
def bwar_bat(return_all=False):
"""
Get data from war_daily_bat table. Returns WAR, its components, and a few other useful stats.
To get all fields from this table, supply argument return_all=True.
"""
url = "http://www.baseball-reference.com/data/war_daily_bat.txt"
s = requests.get(url).content
c=pd.read_csv(io.StringIO(s.decode('utf-8')))
if return_all:
return c
else:
cols_to_keep = ['name_common', 'mlb_ID', 'player_ID', 'year_ID', 'team_ID', 'stint_ID', 'lg_ID',
'pitcher','G', 'PA', 'salary', 'runs_above_avg', 'runs_above_avg_off','runs_above_avg_def',
'WAR_rep','WAA','WAR']
return c[cols_to_keep] |
jldbc/pybaseball | pybaseball/league_batting_stats.py | batting_stats_bref | python | def batting_stats_bref(season=None):
if season is None:
season = datetime.datetime.today().strftime("%Y")
season = str(season)
start_dt = season + '-03-01' #opening day is always late march or early april
end_dt = season + '-11-01' #season is definitely over by November
return(batting_stats_range(start_dt, end_dt)) | Get all batting stats for a set season. If no argument is supplied, gives
stats for current season to date. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/league_batting_stats.py#L95-L105 | [
"def batting_stats_range(start_dt=None, end_dt=None):\n \"\"\"\n Get all batting stats for a set time range. This can be the past week, the\n month of August, anything. Just supply the start and end date in YYYY-MM-DD\n format.\n \"\"\"\n # make sure date inputs are valid\n start_dt, end_dt = s... | import requests
import pandas as pd
import datetime
import io
from bs4 import BeautifulSoup
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's data. For a different date range, try batting_stats_range(start_dt, end_dt) or batting_stats(season).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
#if end date occurs before start date, swap them
if end_dt < start_dt:
temp = start_dt
start_dt = end_dt
end_dt = temp
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def get_soup(start_dt, end_dt):
# get most recent standings if date not specified
# if((start_dt is None) or (end_dt is None)):
# print('Error: a date range needs to be specified')
# return None
url = "http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=b&lastndays=7&dates=fromandto&fromandto={}.{}&level=mlb&franch=&stat=&stat_value=0".format(start_dt, end_dt)
s = requests.get(url).content
return BeautifulSoup(s, "lxml")
def get_table(soup):
table = soup.find_all('table')[0]
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")][1:]
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def batting_stats_range(start_dt=None, end_dt=None):
"""
Get all batting stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format.
"""
# make sure date inputs are valid
start_dt, end_dt = sanitize_input(start_dt, end_dt)
if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
# retrieve html from baseball reference
soup = get_soup(start_dt, end_dt)
table = get_table(soup)
table = table.dropna(how='all') # drop if all columns are NA
# scraped data is initially in string format.
# convert the necessary columns to numeric.
for column in ['Age', '#days', 'G', 'PA', 'AB', 'R', 'H', '2B', '3B',
'HR', 'RBI', 'BB', 'IBB', 'SO', 'HBP', 'SH', 'SF', 'GDP',
'SB', 'CS', 'BA', 'OBP', 'SLG', 'OPS']:
#table[column] = table[column].astype('float')
table[column] = pd.to_numeric(table[column])
#table['column'] = table['column'].convert_objects(convert_numeric=True)
table = table.drop('', 1)
return table
def bwar_bat(return_all=False):
"""
Get data from war_daily_bat table. Returns WAR, its components, and a few other useful stats.
To get all fields from this table, supply argument return_all=True.
"""
url = "http://www.baseball-reference.com/data/war_daily_bat.txt"
s = requests.get(url).content
c=pd.read_csv(io.StringIO(s.decode('utf-8')))
if return_all:
return c
else:
cols_to_keep = ['name_common', 'mlb_ID', 'player_ID', 'year_ID', 'team_ID', 'stint_ID', 'lg_ID',
'pitcher','G', 'PA', 'salary', 'runs_above_avg', 'runs_above_avg_off','runs_above_avg_def',
'WAR_rep','WAA','WAR']
return c[cols_to_keep] |
jldbc/pybaseball | pybaseball/retrosheet.py | season_game_logs | python | def season_game_logs(season):
# validate input
max_year = int(datetime.now().year) - 1
if season > max_year or season < 1871:
raise ValueError('Season must be between 1871 and {}'.format(max_year))
file_name = 'GL{}.TXT'.format(season)
z = get_zip_file(gamelog_url.format(season))
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data | Pull Retrosheet game logs for a given season | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/retrosheet.py#L88-L100 | [
"def get_zip_file(url):\n \"\"\"\n Get zip file from provided URL\n \"\"\"\n with requests.get(url, stream=True) as f:\n z = zipfile.ZipFile(io.BytesIO(f.content))\n return z\n"
] | """
Retrosheet Data Notice:
Recipients of Retrosheet data are free to make any desired use of
the information, including (but not limited to) selling it,
giving it away, or producing a commercial product based upon the
data. Retrosheet has one requirement for any such transfer of
data or product development, which is that the following
statement must appear prominently:
The information used here was obtained free of
charge from and is copyrighted by Retrosheet. Interested
parties may contact Retrosheet at "www.retrosheet.org".
Retrosheet makes no guarantees of accuracy for the information
that is supplied. Much effort is expended to make our website
as correct as possible, but Retrosheet shall not be held
responsible for any consequences arising from the use the
material presented here. All information is subject to corrections
as additional data are received. We are grateful to anyone who
discovers discrepancies and we appreciate learning of the details.
"""
import pandas as pd
from pybaseball.utils import get_zip_file
from datetime import datetime
gamelog_columns = ['date', 'game_num', 'day_of_week', 'visiting_team',
'visiting_team_league', 'visiting_game_num', 'home_team',
'home_team_league', 'home_team_game_num', 'visiting_score',
'home_score', 'num_outs', 'day_night', 'completion_info',
'forfeit_info', 'protest_info', 'park_id', 'attendance',
'time_of_game_minutes', 'visiting_line_score',
'home_line_score', 'visiting_abs', 'visiting_hits',
'visiting_doubles', 'visiting_triples', 'visiting_homeruns',
'visiting_rbi', 'visiting_sac_hits', 'visiting_sac_flies',
'visiting_hbp', 'visiting_bb', 'visiting_iw', 'visiting_k',
'visiting_sb', 'visiting_cs', 'visiting_gdp', 'visiting_ci',
'visiting_lob', 'visiting_pitchers_used',
'visiting_individual_er', 'visiting_er', 'visiting__wp',
'visiting_balks', 'visiting_po', 'visiting_assists',
'visiting_errors', 'visiting_pb', 'visiting_dp',
'visiting_tp', 'home_abs', 'home_hits', 'home_doubles',
'home_triples', 'home_homeruns', 'home_rbi',
'home_sac_hits', 'home_sac_flies', 'home_hbp', 'home_bb',
'home_iw', 'home_k', 'home_sb', 'home_cs', 'home_gdp',
'home_ci', 'home_lob', 'home_pitchers_used',
'home_individual_er', 'home_er', 'home_wp', 'home_balks',
'home_po', 'home_assists', 'home_errors', 'home_pb',
'home_dp', 'home_tp', 'ump_home_id', 'ump_home_name',
'ump_first_id', 'ump_first_name', 'ump_second_id',
'ump_second_name', 'ump_third_id', 'ump_third_name',
'ump_lf_id', 'ump_lf_name', 'ump_rf_id', 'ump_rf_name',
'visiting_manager_id', 'visiting_manager_name',
'home_manager_id', 'home_manager_name',
'winning_pitcher_id', 'winning_pitcher_name',
'losing_pitcher_id', 'losing_pitcher_name',
'save_pitcher_id', 'save_pitcher_name',
'game_winning_rbi_id', 'game_winning_rbi_name',
'visiting_starting_pitcher_id',
'visiting_starting_pitcher_name',
'home_starting_pitcher_id', 'home_starting_pitcher_name',
'visiting_1_id', 'visiting_1_name', 'visiting_1_pos',
'visiting_2_id', 'visiting_2_name', 'visiting_2_pos',
'visiting_2_id.1', 'visiting_3_name', 'visiting_3_pos',
'visiting_4_id', 'visiting_4_name', 'visiting_4_pos',
'visiting_5_id', 'visiting_5_name', 'visiting_5_pos',
'visiting_6_id', 'visiting_6_name', 'visiting_6_pos',
'visiting_7_id', 'visiting_7_name', 'visiting_7_pos',
'visiting_8_id', 'visiting_8_name', 'visiting_8_pos',
'visiting_9_id', 'visiting_9_name', 'visiting_9_pos',
'home_1_id', 'home_1_name', 'home_1_pos', 'home_2_id',
'home_2_name', 'home_2_pos', 'home_3_id', 'home_3_name',
'home_3_pos', 'home_4_id', 'home_4_name', 'home_4_pos',
'home_5_id', 'home_5_name', 'home_5_pos', 'home_6_id',
'home_6_name', 'home_6_pos', 'home_7_id', 'home_7_name',
'home_7_pos', 'home_8_id', 'home_8_name', 'home_8_pos',
'home_9_id', 'home_9_name', 'home_9_pos', 'misc',
'acquisition_info']
gamelog_url = 'http://www.retrosheet.org/gamelogs/gl{}.zip'
world_series_url = 'http://www.retrosheet.org/gamelogs/glws.zip'
all_star_url = 'http://www.retrosheet.org/gamelogs/glas.zip'
wild_card_url = 'http://www.retrosheet.org/gamelogs/glwc.zip'
division_series_url = 'http://www.retrosheet.org/gamelogs/gldv.zip'
lcs_url = 'http://www.retrosheet.org/gamelogs/gllc.zip'
def world_series_logs():
"""
Pull Retrosheet World Series Game Logs
"""
file_name = 'GLWS.TXT'
z = get_zip_file(world_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def all_star_game_logs():
"""
Pull Retrosheet All Star Game Logs
"""
file_name = 'GLAS.TXT'
z = get_zip_file(all_star_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def wild_card_logs():
"""
Pull Retrosheet Wild Card Game Logs
"""
file_name = 'GLWC.TXT'
z = get_zip_file(wild_card_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def division_series_logs():
"""
Pull Retrosheet Division Series Game Logs
"""
file_name = 'GLDV.TXT'
z = get_zip_file(division_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def lcs_logs():
"""
Pull Retrosheet LCS Game Logs
"""
file_name = 'GLLC.TXT'
z = get_zip_file(lcs_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
|
jldbc/pybaseball | pybaseball/retrosheet.py | world_series_logs | python | def world_series_logs():
file_name = 'GLWS.TXT'
z = get_zip_file(world_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data | Pull Retrosheet World Series Game Logs | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/retrosheet.py#L103-L111 | [
"def get_zip_file(url):\n \"\"\"\n Get zip file from provided URL\n \"\"\"\n with requests.get(url, stream=True) as f:\n z = zipfile.ZipFile(io.BytesIO(f.content))\n return z\n"
] | """
Retrosheet Data Notice:
Recipients of Retrosheet data are free to make any desired use of
the information, including (but not limited to) selling it,
giving it away, or producing a commercial product based upon the
data. Retrosheet has one requirement for any such transfer of
data or product development, which is that the following
statement must appear prominently:
The information used here was obtained free of
charge from and is copyrighted by Retrosheet. Interested
parties may contact Retrosheet at "www.retrosheet.org".
Retrosheet makes no guarantees of accuracy for the information
that is supplied. Much effort is expended to make our website
as correct as possible, but Retrosheet shall not be held
responsible for any consequences arising from the use the
material presented here. All information is subject to corrections
as additional data are received. We are grateful to anyone who
discovers discrepancies and we appreciate learning of the details.
"""
import pandas as pd
from pybaseball.utils import get_zip_file
from datetime import datetime
gamelog_columns = ['date', 'game_num', 'day_of_week', 'visiting_team',
'visiting_team_league', 'visiting_game_num', 'home_team',
'home_team_league', 'home_team_game_num', 'visiting_score',
'home_score', 'num_outs', 'day_night', 'completion_info',
'forfeit_info', 'protest_info', 'park_id', 'attendance',
'time_of_game_minutes', 'visiting_line_score',
'home_line_score', 'visiting_abs', 'visiting_hits',
'visiting_doubles', 'visiting_triples', 'visiting_homeruns',
'visiting_rbi', 'visiting_sac_hits', 'visiting_sac_flies',
'visiting_hbp', 'visiting_bb', 'visiting_iw', 'visiting_k',
'visiting_sb', 'visiting_cs', 'visiting_gdp', 'visiting_ci',
'visiting_lob', 'visiting_pitchers_used',
'visiting_individual_er', 'visiting_er', 'visiting__wp',
'visiting_balks', 'visiting_po', 'visiting_assists',
'visiting_errors', 'visiting_pb', 'visiting_dp',
'visiting_tp', 'home_abs', 'home_hits', 'home_doubles',
'home_triples', 'home_homeruns', 'home_rbi',
'home_sac_hits', 'home_sac_flies', 'home_hbp', 'home_bb',
'home_iw', 'home_k', 'home_sb', 'home_cs', 'home_gdp',
'home_ci', 'home_lob', 'home_pitchers_used',
'home_individual_er', 'home_er', 'home_wp', 'home_balks',
'home_po', 'home_assists', 'home_errors', 'home_pb',
'home_dp', 'home_tp', 'ump_home_id', 'ump_home_name',
'ump_first_id', 'ump_first_name', 'ump_second_id',
'ump_second_name', 'ump_third_id', 'ump_third_name',
'ump_lf_id', 'ump_lf_name', 'ump_rf_id', 'ump_rf_name',
'visiting_manager_id', 'visiting_manager_name',
'home_manager_id', 'home_manager_name',
'winning_pitcher_id', 'winning_pitcher_name',
'losing_pitcher_id', 'losing_pitcher_name',
'save_pitcher_id', 'save_pitcher_name',
'game_winning_rbi_id', 'game_winning_rbi_name',
'visiting_starting_pitcher_id',
'visiting_starting_pitcher_name',
'home_starting_pitcher_id', 'home_starting_pitcher_name',
'visiting_1_id', 'visiting_1_name', 'visiting_1_pos',
'visiting_2_id', 'visiting_2_name', 'visiting_2_pos',
'visiting_2_id.1', 'visiting_3_name', 'visiting_3_pos',
'visiting_4_id', 'visiting_4_name', 'visiting_4_pos',
'visiting_5_id', 'visiting_5_name', 'visiting_5_pos',
'visiting_6_id', 'visiting_6_name', 'visiting_6_pos',
'visiting_7_id', 'visiting_7_name', 'visiting_7_pos',
'visiting_8_id', 'visiting_8_name', 'visiting_8_pos',
'visiting_9_id', 'visiting_9_name', 'visiting_9_pos',
'home_1_id', 'home_1_name', 'home_1_pos', 'home_2_id',
'home_2_name', 'home_2_pos', 'home_3_id', 'home_3_name',
'home_3_pos', 'home_4_id', 'home_4_name', 'home_4_pos',
'home_5_id', 'home_5_name', 'home_5_pos', 'home_6_id',
'home_6_name', 'home_6_pos', 'home_7_id', 'home_7_name',
'home_7_pos', 'home_8_id', 'home_8_name', 'home_8_pos',
'home_9_id', 'home_9_name', 'home_9_pos', 'misc',
'acquisition_info']
gamelog_url = 'http://www.retrosheet.org/gamelogs/gl{}.zip'
world_series_url = 'http://www.retrosheet.org/gamelogs/glws.zip'
all_star_url = 'http://www.retrosheet.org/gamelogs/glas.zip'
wild_card_url = 'http://www.retrosheet.org/gamelogs/glwc.zip'
division_series_url = 'http://www.retrosheet.org/gamelogs/gldv.zip'
lcs_url = 'http://www.retrosheet.org/gamelogs/gllc.zip'
def season_game_logs(season):
"""
Pull Retrosheet game logs for a given season
"""
# validate input
max_year = int(datetime.now().year) - 1
if season > max_year or season < 1871:
raise ValueError('Season must be between 1871 and {}'.format(max_year))
file_name = 'GL{}.TXT'.format(season)
z = get_zip_file(gamelog_url.format(season))
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def all_star_game_logs():
"""
Pull Retrosheet All Star Game Logs
"""
file_name = 'GLAS.TXT'
z = get_zip_file(all_star_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def wild_card_logs():
"""
Pull Retrosheet Wild Card Game Logs
"""
file_name = 'GLWC.TXT'
z = get_zip_file(wild_card_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def division_series_logs():
"""
Pull Retrosheet Division Series Game Logs
"""
file_name = 'GLDV.TXT'
z = get_zip_file(division_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def lcs_logs():
"""
Pull Retrosheet LCS Game Logs
"""
file_name = 'GLLC.TXT'
z = get_zip_file(lcs_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
|
jldbc/pybaseball | pybaseball/retrosheet.py | all_star_game_logs | python | def all_star_game_logs():
file_name = 'GLAS.TXT'
z = get_zip_file(all_star_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data | Pull Retrosheet All Star Game Logs | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/retrosheet.py#L114-L122 | [
"def get_zip_file(url):\n \"\"\"\n Get zip file from provided URL\n \"\"\"\n with requests.get(url, stream=True) as f:\n z = zipfile.ZipFile(io.BytesIO(f.content))\n return z\n"
] | """
Retrosheet Data Notice:
Recipients of Retrosheet data are free to make any desired use of
the information, including (but not limited to) selling it,
giving it away, or producing a commercial product based upon the
data. Retrosheet has one requirement for any such transfer of
data or product development, which is that the following
statement must appear prominently:
The information used here was obtained free of
charge from and is copyrighted by Retrosheet. Interested
parties may contact Retrosheet at "www.retrosheet.org".
Retrosheet makes no guarantees of accuracy for the information
that is supplied. Much effort is expended to make our website
as correct as possible, but Retrosheet shall not be held
responsible for any consequences arising from the use the
material presented here. All information is subject to corrections
as additional data are received. We are grateful to anyone who
discovers discrepancies and we appreciate learning of the details.
"""
import pandas as pd
from pybaseball.utils import get_zip_file
from datetime import datetime
gamelog_columns = ['date', 'game_num', 'day_of_week', 'visiting_team',
'visiting_team_league', 'visiting_game_num', 'home_team',
'home_team_league', 'home_team_game_num', 'visiting_score',
'home_score', 'num_outs', 'day_night', 'completion_info',
'forfeit_info', 'protest_info', 'park_id', 'attendance',
'time_of_game_minutes', 'visiting_line_score',
'home_line_score', 'visiting_abs', 'visiting_hits',
'visiting_doubles', 'visiting_triples', 'visiting_homeruns',
'visiting_rbi', 'visiting_sac_hits', 'visiting_sac_flies',
'visiting_hbp', 'visiting_bb', 'visiting_iw', 'visiting_k',
'visiting_sb', 'visiting_cs', 'visiting_gdp', 'visiting_ci',
'visiting_lob', 'visiting_pitchers_used',
'visiting_individual_er', 'visiting_er', 'visiting__wp',
'visiting_balks', 'visiting_po', 'visiting_assists',
'visiting_errors', 'visiting_pb', 'visiting_dp',
'visiting_tp', 'home_abs', 'home_hits', 'home_doubles',
'home_triples', 'home_homeruns', 'home_rbi',
'home_sac_hits', 'home_sac_flies', 'home_hbp', 'home_bb',
'home_iw', 'home_k', 'home_sb', 'home_cs', 'home_gdp',
'home_ci', 'home_lob', 'home_pitchers_used',
'home_individual_er', 'home_er', 'home_wp', 'home_balks',
'home_po', 'home_assists', 'home_errors', 'home_pb',
'home_dp', 'home_tp', 'ump_home_id', 'ump_home_name',
'ump_first_id', 'ump_first_name', 'ump_second_id',
'ump_second_name', 'ump_third_id', 'ump_third_name',
'ump_lf_id', 'ump_lf_name', 'ump_rf_id', 'ump_rf_name',
'visiting_manager_id', 'visiting_manager_name',
'home_manager_id', 'home_manager_name',
'winning_pitcher_id', 'winning_pitcher_name',
'losing_pitcher_id', 'losing_pitcher_name',
'save_pitcher_id', 'save_pitcher_name',
'game_winning_rbi_id', 'game_winning_rbi_name',
'visiting_starting_pitcher_id',
'visiting_starting_pitcher_name',
'home_starting_pitcher_id', 'home_starting_pitcher_name',
'visiting_1_id', 'visiting_1_name', 'visiting_1_pos',
'visiting_2_id', 'visiting_2_name', 'visiting_2_pos',
'visiting_2_id.1', 'visiting_3_name', 'visiting_3_pos',
'visiting_4_id', 'visiting_4_name', 'visiting_4_pos',
'visiting_5_id', 'visiting_5_name', 'visiting_5_pos',
'visiting_6_id', 'visiting_6_name', 'visiting_6_pos',
'visiting_7_id', 'visiting_7_name', 'visiting_7_pos',
'visiting_8_id', 'visiting_8_name', 'visiting_8_pos',
'visiting_9_id', 'visiting_9_name', 'visiting_9_pos',
'home_1_id', 'home_1_name', 'home_1_pos', 'home_2_id',
'home_2_name', 'home_2_pos', 'home_3_id', 'home_3_name',
'home_3_pos', 'home_4_id', 'home_4_name', 'home_4_pos',
'home_5_id', 'home_5_name', 'home_5_pos', 'home_6_id',
'home_6_name', 'home_6_pos', 'home_7_id', 'home_7_name',
'home_7_pos', 'home_8_id', 'home_8_name', 'home_8_pos',
'home_9_id', 'home_9_name', 'home_9_pos', 'misc',
'acquisition_info']
gamelog_url = 'http://www.retrosheet.org/gamelogs/gl{}.zip'
world_series_url = 'http://www.retrosheet.org/gamelogs/glws.zip'
all_star_url = 'http://www.retrosheet.org/gamelogs/glas.zip'
wild_card_url = 'http://www.retrosheet.org/gamelogs/glwc.zip'
division_series_url = 'http://www.retrosheet.org/gamelogs/gldv.zip'
lcs_url = 'http://www.retrosheet.org/gamelogs/gllc.zip'
def season_game_logs(season):
"""
Pull Retrosheet game logs for a given season
"""
# validate input
max_year = int(datetime.now().year) - 1
if season > max_year or season < 1871:
raise ValueError('Season must be between 1871 and {}'.format(max_year))
file_name = 'GL{}.TXT'.format(season)
z = get_zip_file(gamelog_url.format(season))
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def world_series_logs():
"""
Pull Retrosheet World Series Game Logs
"""
file_name = 'GLWS.TXT'
z = get_zip_file(world_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def wild_card_logs():
"""
Pull Retrosheet Wild Card Game Logs
"""
file_name = 'GLWC.TXT'
z = get_zip_file(wild_card_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def division_series_logs():
"""
Pull Retrosheet Division Series Game Logs
"""
file_name = 'GLDV.TXT'
z = get_zip_file(division_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def lcs_logs():
"""
Pull Retrosheet LCS Game Logs
"""
file_name = 'GLLC.TXT'
z = get_zip_file(lcs_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
|
jldbc/pybaseball | pybaseball/retrosheet.py | wild_card_logs | python | def wild_card_logs():
file_name = 'GLWC.TXT'
z = get_zip_file(wild_card_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data | Pull Retrosheet Wild Card Game Logs | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/retrosheet.py#L125-L133 | [
"def get_zip_file(url):\n \"\"\"\n Get zip file from provided URL\n \"\"\"\n with requests.get(url, stream=True) as f:\n z = zipfile.ZipFile(io.BytesIO(f.content))\n return z\n"
] | """
Retrosheet Data Notice:
Recipients of Retrosheet data are free to make any desired use of
the information, including (but not limited to) selling it,
giving it away, or producing a commercial product based upon the
data. Retrosheet has one requirement for any such transfer of
data or product development, which is that the following
statement must appear prominently:
The information used here was obtained free of
charge from and is copyrighted by Retrosheet. Interested
parties may contact Retrosheet at "www.retrosheet.org".
Retrosheet makes no guarantees of accuracy for the information
that is supplied. Much effort is expended to make our website
as correct as possible, but Retrosheet shall not be held
responsible for any consequences arising from the use the
material presented here. All information is subject to corrections
as additional data are received. We are grateful to anyone who
discovers discrepancies and we appreciate learning of the details.
"""
import pandas as pd
from pybaseball.utils import get_zip_file
from datetime import datetime
gamelog_columns = ['date', 'game_num', 'day_of_week', 'visiting_team',
'visiting_team_league', 'visiting_game_num', 'home_team',
'home_team_league', 'home_team_game_num', 'visiting_score',
'home_score', 'num_outs', 'day_night', 'completion_info',
'forfeit_info', 'protest_info', 'park_id', 'attendance',
'time_of_game_minutes', 'visiting_line_score',
'home_line_score', 'visiting_abs', 'visiting_hits',
'visiting_doubles', 'visiting_triples', 'visiting_homeruns',
'visiting_rbi', 'visiting_sac_hits', 'visiting_sac_flies',
'visiting_hbp', 'visiting_bb', 'visiting_iw', 'visiting_k',
'visiting_sb', 'visiting_cs', 'visiting_gdp', 'visiting_ci',
'visiting_lob', 'visiting_pitchers_used',
'visiting_individual_er', 'visiting_er', 'visiting__wp',
'visiting_balks', 'visiting_po', 'visiting_assists',
'visiting_errors', 'visiting_pb', 'visiting_dp',
'visiting_tp', 'home_abs', 'home_hits', 'home_doubles',
'home_triples', 'home_homeruns', 'home_rbi',
'home_sac_hits', 'home_sac_flies', 'home_hbp', 'home_bb',
'home_iw', 'home_k', 'home_sb', 'home_cs', 'home_gdp',
'home_ci', 'home_lob', 'home_pitchers_used',
'home_individual_er', 'home_er', 'home_wp', 'home_balks',
'home_po', 'home_assists', 'home_errors', 'home_pb',
'home_dp', 'home_tp', 'ump_home_id', 'ump_home_name',
'ump_first_id', 'ump_first_name', 'ump_second_id',
'ump_second_name', 'ump_third_id', 'ump_third_name',
'ump_lf_id', 'ump_lf_name', 'ump_rf_id', 'ump_rf_name',
'visiting_manager_id', 'visiting_manager_name',
'home_manager_id', 'home_manager_name',
'winning_pitcher_id', 'winning_pitcher_name',
'losing_pitcher_id', 'losing_pitcher_name',
'save_pitcher_id', 'save_pitcher_name',
'game_winning_rbi_id', 'game_winning_rbi_name',
'visiting_starting_pitcher_id',
'visiting_starting_pitcher_name',
'home_starting_pitcher_id', 'home_starting_pitcher_name',
'visiting_1_id', 'visiting_1_name', 'visiting_1_pos',
'visiting_2_id', 'visiting_2_name', 'visiting_2_pos',
'visiting_2_id.1', 'visiting_3_name', 'visiting_3_pos',
'visiting_4_id', 'visiting_4_name', 'visiting_4_pos',
'visiting_5_id', 'visiting_5_name', 'visiting_5_pos',
'visiting_6_id', 'visiting_6_name', 'visiting_6_pos',
'visiting_7_id', 'visiting_7_name', 'visiting_7_pos',
'visiting_8_id', 'visiting_8_name', 'visiting_8_pos',
'visiting_9_id', 'visiting_9_name', 'visiting_9_pos',
'home_1_id', 'home_1_name', 'home_1_pos', 'home_2_id',
'home_2_name', 'home_2_pos', 'home_3_id', 'home_3_name',
'home_3_pos', 'home_4_id', 'home_4_name', 'home_4_pos',
'home_5_id', 'home_5_name', 'home_5_pos', 'home_6_id',
'home_6_name', 'home_6_pos', 'home_7_id', 'home_7_name',
'home_7_pos', 'home_8_id', 'home_8_name', 'home_8_pos',
'home_9_id', 'home_9_name', 'home_9_pos', 'misc',
'acquisition_info']
gamelog_url = 'http://www.retrosheet.org/gamelogs/gl{}.zip'
world_series_url = 'http://www.retrosheet.org/gamelogs/glws.zip'
all_star_url = 'http://www.retrosheet.org/gamelogs/glas.zip'
wild_card_url = 'http://www.retrosheet.org/gamelogs/glwc.zip'
division_series_url = 'http://www.retrosheet.org/gamelogs/gldv.zip'
lcs_url = 'http://www.retrosheet.org/gamelogs/gllc.zip'
def season_game_logs(season):
"""
Pull Retrosheet game logs for a given season
"""
# validate input
max_year = int(datetime.now().year) - 1
if season > max_year or season < 1871:
raise ValueError('Season must be between 1871 and {}'.format(max_year))
file_name = 'GL{}.TXT'.format(season)
z = get_zip_file(gamelog_url.format(season))
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def world_series_logs():
"""
Pull Retrosheet World Series Game Logs
"""
file_name = 'GLWS.TXT'
z = get_zip_file(world_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def all_star_game_logs():
"""
Pull Retrosheet All Star Game Logs
"""
file_name = 'GLAS.TXT'
z = get_zip_file(all_star_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def division_series_logs():
"""
Pull Retrosheet Division Series Game Logs
"""
file_name = 'GLDV.TXT'
z = get_zip_file(division_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def lcs_logs():
"""
Pull Retrosheet LCS Game Logs
"""
file_name = 'GLLC.TXT'
z = get_zip_file(lcs_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
|
jldbc/pybaseball | pybaseball/retrosheet.py | division_series_logs | python | def division_series_logs():
file_name = 'GLDV.TXT'
z = get_zip_file(division_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data | Pull Retrosheet Division Series Game Logs | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/retrosheet.py#L136-L144 | [
"def get_zip_file(url):\n \"\"\"\n Get zip file from provided URL\n \"\"\"\n with requests.get(url, stream=True) as f:\n z = zipfile.ZipFile(io.BytesIO(f.content))\n return z\n"
] | """
Retrosheet Data Notice:
Recipients of Retrosheet data are free to make any desired use of
the information, including (but not limited to) selling it,
giving it away, or producing a commercial product based upon the
data. Retrosheet has one requirement for any such transfer of
data or product development, which is that the following
statement must appear prominently:
The information used here was obtained free of
charge from and is copyrighted by Retrosheet. Interested
parties may contact Retrosheet at "www.retrosheet.org".
Retrosheet makes no guarantees of accuracy for the information
that is supplied. Much effort is expended to make our website
as correct as possible, but Retrosheet shall not be held
responsible for any consequences arising from the use the
material presented here. All information is subject to corrections
as additional data are received. We are grateful to anyone who
discovers discrepancies and we appreciate learning of the details.
"""
import pandas as pd
from pybaseball.utils import get_zip_file
from datetime import datetime
gamelog_columns = ['date', 'game_num', 'day_of_week', 'visiting_team',
'visiting_team_league', 'visiting_game_num', 'home_team',
'home_team_league', 'home_team_game_num', 'visiting_score',
'home_score', 'num_outs', 'day_night', 'completion_info',
'forfeit_info', 'protest_info', 'park_id', 'attendance',
'time_of_game_minutes', 'visiting_line_score',
'home_line_score', 'visiting_abs', 'visiting_hits',
'visiting_doubles', 'visiting_triples', 'visiting_homeruns',
'visiting_rbi', 'visiting_sac_hits', 'visiting_sac_flies',
'visiting_hbp', 'visiting_bb', 'visiting_iw', 'visiting_k',
'visiting_sb', 'visiting_cs', 'visiting_gdp', 'visiting_ci',
'visiting_lob', 'visiting_pitchers_used',
'visiting_individual_er', 'visiting_er', 'visiting__wp',
'visiting_balks', 'visiting_po', 'visiting_assists',
'visiting_errors', 'visiting_pb', 'visiting_dp',
'visiting_tp', 'home_abs', 'home_hits', 'home_doubles',
'home_triples', 'home_homeruns', 'home_rbi',
'home_sac_hits', 'home_sac_flies', 'home_hbp', 'home_bb',
'home_iw', 'home_k', 'home_sb', 'home_cs', 'home_gdp',
'home_ci', 'home_lob', 'home_pitchers_used',
'home_individual_er', 'home_er', 'home_wp', 'home_balks',
'home_po', 'home_assists', 'home_errors', 'home_pb',
'home_dp', 'home_tp', 'ump_home_id', 'ump_home_name',
'ump_first_id', 'ump_first_name', 'ump_second_id',
'ump_second_name', 'ump_third_id', 'ump_third_name',
'ump_lf_id', 'ump_lf_name', 'ump_rf_id', 'ump_rf_name',
'visiting_manager_id', 'visiting_manager_name',
'home_manager_id', 'home_manager_name',
'winning_pitcher_id', 'winning_pitcher_name',
'losing_pitcher_id', 'losing_pitcher_name',
'save_pitcher_id', 'save_pitcher_name',
'game_winning_rbi_id', 'game_winning_rbi_name',
'visiting_starting_pitcher_id',
'visiting_starting_pitcher_name',
'home_starting_pitcher_id', 'home_starting_pitcher_name',
'visiting_1_id', 'visiting_1_name', 'visiting_1_pos',
'visiting_2_id', 'visiting_2_name', 'visiting_2_pos',
'visiting_2_id.1', 'visiting_3_name', 'visiting_3_pos',
'visiting_4_id', 'visiting_4_name', 'visiting_4_pos',
'visiting_5_id', 'visiting_5_name', 'visiting_5_pos',
'visiting_6_id', 'visiting_6_name', 'visiting_6_pos',
'visiting_7_id', 'visiting_7_name', 'visiting_7_pos',
'visiting_8_id', 'visiting_8_name', 'visiting_8_pos',
'visiting_9_id', 'visiting_9_name', 'visiting_9_pos',
'home_1_id', 'home_1_name', 'home_1_pos', 'home_2_id',
'home_2_name', 'home_2_pos', 'home_3_id', 'home_3_name',
'home_3_pos', 'home_4_id', 'home_4_name', 'home_4_pos',
'home_5_id', 'home_5_name', 'home_5_pos', 'home_6_id',
'home_6_name', 'home_6_pos', 'home_7_id', 'home_7_name',
'home_7_pos', 'home_8_id', 'home_8_name', 'home_8_pos',
'home_9_id', 'home_9_name', 'home_9_pos', 'misc',
'acquisition_info']
gamelog_url = 'http://www.retrosheet.org/gamelogs/gl{}.zip'
world_series_url = 'http://www.retrosheet.org/gamelogs/glws.zip'
all_star_url = 'http://www.retrosheet.org/gamelogs/glas.zip'
wild_card_url = 'http://www.retrosheet.org/gamelogs/glwc.zip'
division_series_url = 'http://www.retrosheet.org/gamelogs/gldv.zip'
lcs_url = 'http://www.retrosheet.org/gamelogs/gllc.zip'
def season_game_logs(season):
"""
Pull Retrosheet game logs for a given season
"""
# validate input
max_year = int(datetime.now().year) - 1
if season > max_year or season < 1871:
raise ValueError('Season must be between 1871 and {}'.format(max_year))
file_name = 'GL{}.TXT'.format(season)
z = get_zip_file(gamelog_url.format(season))
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def world_series_logs():
"""
Pull Retrosheet World Series Game Logs
"""
file_name = 'GLWS.TXT'
z = get_zip_file(world_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def all_star_game_logs():
"""
Pull Retrosheet All Star Game Logs
"""
file_name = 'GLAS.TXT'
z = get_zip_file(all_star_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def wild_card_logs():
"""
Pull Retrosheet Wild Card Game Logs
"""
file_name = 'GLWC.TXT'
z = get_zip_file(wild_card_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def lcs_logs():
"""
Pull Retrosheet LCS Game Logs
"""
file_name = 'GLLC.TXT'
z = get_zip_file(lcs_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
|
jldbc/pybaseball | pybaseball/retrosheet.py | lcs_logs | python | def lcs_logs():
file_name = 'GLLC.TXT'
z = get_zip_file(lcs_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data | Pull Retrosheet LCS Game Logs | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/retrosheet.py#L147-L155 | [
"def get_zip_file(url):\n \"\"\"\n Get zip file from provided URL\n \"\"\"\n with requests.get(url, stream=True) as f:\n z = zipfile.ZipFile(io.BytesIO(f.content))\n return z\n"
] | """
Retrosheet Data Notice:
Recipients of Retrosheet data are free to make any desired use of
the information, including (but not limited to) selling it,
giving it away, or producing a commercial product based upon the
data. Retrosheet has one requirement for any such transfer of
data or product development, which is that the following
statement must appear prominently:
The information used here was obtained free of
charge from and is copyrighted by Retrosheet. Interested
parties may contact Retrosheet at "www.retrosheet.org".
Retrosheet makes no guarantees of accuracy for the information
that is supplied. Much effort is expended to make our website
as correct as possible, but Retrosheet shall not be held
responsible for any consequences arising from the use the
material presented here. All information is subject to corrections
as additional data are received. We are grateful to anyone who
discovers discrepancies and we appreciate learning of the details.
"""
import pandas as pd
from pybaseball.utils import get_zip_file
from datetime import datetime
gamelog_columns = ['date', 'game_num', 'day_of_week', 'visiting_team',
'visiting_team_league', 'visiting_game_num', 'home_team',
'home_team_league', 'home_team_game_num', 'visiting_score',
'home_score', 'num_outs', 'day_night', 'completion_info',
'forfeit_info', 'protest_info', 'park_id', 'attendance',
'time_of_game_minutes', 'visiting_line_score',
'home_line_score', 'visiting_abs', 'visiting_hits',
'visiting_doubles', 'visiting_triples', 'visiting_homeruns',
'visiting_rbi', 'visiting_sac_hits', 'visiting_sac_flies',
'visiting_hbp', 'visiting_bb', 'visiting_iw', 'visiting_k',
'visiting_sb', 'visiting_cs', 'visiting_gdp', 'visiting_ci',
'visiting_lob', 'visiting_pitchers_used',
'visiting_individual_er', 'visiting_er', 'visiting__wp',
'visiting_balks', 'visiting_po', 'visiting_assists',
'visiting_errors', 'visiting_pb', 'visiting_dp',
'visiting_tp', 'home_abs', 'home_hits', 'home_doubles',
'home_triples', 'home_homeruns', 'home_rbi',
'home_sac_hits', 'home_sac_flies', 'home_hbp', 'home_bb',
'home_iw', 'home_k', 'home_sb', 'home_cs', 'home_gdp',
'home_ci', 'home_lob', 'home_pitchers_used',
'home_individual_er', 'home_er', 'home_wp', 'home_balks',
'home_po', 'home_assists', 'home_errors', 'home_pb',
'home_dp', 'home_tp', 'ump_home_id', 'ump_home_name',
'ump_first_id', 'ump_first_name', 'ump_second_id',
'ump_second_name', 'ump_third_id', 'ump_third_name',
'ump_lf_id', 'ump_lf_name', 'ump_rf_id', 'ump_rf_name',
'visiting_manager_id', 'visiting_manager_name',
'home_manager_id', 'home_manager_name',
'winning_pitcher_id', 'winning_pitcher_name',
'losing_pitcher_id', 'losing_pitcher_name',
'save_pitcher_id', 'save_pitcher_name',
'game_winning_rbi_id', 'game_winning_rbi_name',
'visiting_starting_pitcher_id',
'visiting_starting_pitcher_name',
'home_starting_pitcher_id', 'home_starting_pitcher_name',
'visiting_1_id', 'visiting_1_name', 'visiting_1_pos',
'visiting_2_id', 'visiting_2_name', 'visiting_2_pos',
'visiting_2_id.1', 'visiting_3_name', 'visiting_3_pos',
'visiting_4_id', 'visiting_4_name', 'visiting_4_pos',
'visiting_5_id', 'visiting_5_name', 'visiting_5_pos',
'visiting_6_id', 'visiting_6_name', 'visiting_6_pos',
'visiting_7_id', 'visiting_7_name', 'visiting_7_pos',
'visiting_8_id', 'visiting_8_name', 'visiting_8_pos',
'visiting_9_id', 'visiting_9_name', 'visiting_9_pos',
'home_1_id', 'home_1_name', 'home_1_pos', 'home_2_id',
'home_2_name', 'home_2_pos', 'home_3_id', 'home_3_name',
'home_3_pos', 'home_4_id', 'home_4_name', 'home_4_pos',
'home_5_id', 'home_5_name', 'home_5_pos', 'home_6_id',
'home_6_name', 'home_6_pos', 'home_7_id', 'home_7_name',
'home_7_pos', 'home_8_id', 'home_8_name', 'home_8_pos',
'home_9_id', 'home_9_name', 'home_9_pos', 'misc',
'acquisition_info']
gamelog_url = 'http://www.retrosheet.org/gamelogs/gl{}.zip'
world_series_url = 'http://www.retrosheet.org/gamelogs/glws.zip'
all_star_url = 'http://www.retrosheet.org/gamelogs/glas.zip'
wild_card_url = 'http://www.retrosheet.org/gamelogs/glwc.zip'
division_series_url = 'http://www.retrosheet.org/gamelogs/gldv.zip'
lcs_url = 'http://www.retrosheet.org/gamelogs/gllc.zip'
def season_game_logs(season):
"""
Pull Retrosheet game logs for a given season
"""
# validate input
max_year = int(datetime.now().year) - 1
if season > max_year or season < 1871:
raise ValueError('Season must be between 1871 and {}'.format(max_year))
file_name = 'GL{}.TXT'.format(season)
z = get_zip_file(gamelog_url.format(season))
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def world_series_logs():
"""
Pull Retrosheet World Series Game Logs
"""
file_name = 'GLWS.TXT'
z = get_zip_file(world_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def all_star_game_logs():
"""
Pull Retrosheet All Star Game Logs
"""
file_name = 'GLAS.TXT'
z = get_zip_file(all_star_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def wild_card_logs():
"""
Pull Retrosheet Wild Card Game Logs
"""
file_name = 'GLWC.TXT'
z = get_zip_file(wild_card_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
def division_series_logs():
"""
Pull Retrosheet Division Series Game Logs
"""
file_name = 'GLDV.TXT'
z = get_zip_file(division_series_url)
data = pd.read_csv(z.open(file_name), header=None, sep=',', quotechar='"')
data.columns = gamelog_columns
return data
|
jldbc/pybaseball | pybaseball/team_pitching.py | team_pitching | python | def team_pitching(start_season, end_season=None, league='all', ind=1):
if start_season is None:
raise ValueError("You need to provide at least one season to collect data for. Try team_pitching(season) or team_pitching(start_season, end_season).")
if end_season is None:
end_season = start_season
soup = get_soup(start_season=start_season, end_season=end_season, league=league, ind=ind)
table = get_table(soup, ind)
table = postprocessing(table)
return table | Get season-level pitching data aggregated by team.
ARGUMENTS:
start_season : int : first season you want data for (or the only season if you do not specify an end_season)
end_season : int : final season you want data for
league : "all", "nl", or "al"
ind : int : =1 if you want individual season level data, =0 if you want a team'ss aggreagate data over all seasons in the query | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/team_pitching.py#L60-L77 | [
"def get_soup(start_season, end_season, league, ind):\n url = \"http://www.fangraphs.com/leaders.aspx?pos=all&stats=pit&lg={}&qual=0&type=c,4,5,11,7,8,13,-1,24,36,37,40,43,44,48,51,-1,6,45,62,-1,59&season={}&month=0&season1={}&ind={}&team=0,ts&rost=0&age=0&filter=&players=0&page=1_100000\"\n url = url.format(... | import numpy as np
import pandas as pd
from bs4 import BeautifulSoup
import requests
def get_soup(start_season, end_season, league, ind):
url = "http://www.fangraphs.com/leaders.aspx?pos=all&stats=pit&lg={}&qual=0&type=c,4,5,11,7,8,13,-1,24,36,37,40,43,44,48,51,-1,6,45,62,-1,59&season={}&month=0&season1={}&ind={}&team=0,ts&rost=0&age=0&filter=&players=0&page=1_100000"
url = url.format(league, end_season, start_season, ind)
s=requests.get(url).content
#print(s)
return BeautifulSoup(s, "lxml")
def get_table(soup, ind):
#doesn't work yet
tables = soup.find_all('table')
table = tables[11]
data = []
# couldn't find these in the table, hardcoding for now
if ind == 0:
headings = ["Team", "W","L","SV","G","GS","IP","SO","K/9","BB/9","HR/9","BABIP","LOB%","GB%","HR/FB","ERA","FIP","xFIP","WAR"]
else:
headings = ["Season","Team","W","L","SV","G","GS","IP","SO","K/9","BB/9","HR/9","BABIP","LOB%","GB%","HR/FB","ERA","FIP","xFIP","WAR"]
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols[1:]])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def postprocessing(data):
# fill missing values with NaN
data.replace(r'^\s*$', np.nan, regex=True, inplace = True)
data.replace(r'^null$', np.nan, regex=True, inplace = True)
# convert percent strings to float values
percentages = ['GB%', 'HR/FB', 'LOB%']
for col in percentages:
# skip if column is all NA (happens for some of the more obscure stats + in older seasons)
if data[col].count()>0:
data[col] = data[col].str.strip(' %')
data[col] = data[col].str.strip('%')
data[col] = data[col].astype(float)/100.
else:
#print(col)
pass
# convert columns to numeric
not_numeric = ['Team']
numeric_cols = [col for col in data.columns if col not in not_numeric]
data[numeric_cols] = data[numeric_cols].astype(float)
return data
def team_pitching_bref(team, start_season, end_season=None):
"""
Get season-level Pitching Statistics for Specific Team (from Baseball-Reference)
ARGUMENTS:
team : str : The Team Abbreviation (i.e. 'NYY' for Yankees) of the Team you want data for
start_season : int : first season you want data for (or the only season if you do not specify an end_season)
end_season : int : final season you want data for
"""
if start_season is None:
raise ValueError("You need to provide at least one season to collect data for. Try team_pitching_bref(season) or team_pitching_bref(start_season, end_season).")
if end_season is None:
end_season = start_season
url = "https://www.baseball-reference.com/teams/{}".format(team)
data = []
headings = None
for season in range(start_season, end_season+1):
print("Getting Pitching Data: {} {}".format(season, team))
stats_url = "{}/{}.shtml".format(url, season)
response = requests.get(stats_url)
soup = BeautifulSoup(response.content, 'html.parser')
table = soup.find_all('table', {'id': 'team_pitching'})[0]
if headings is None:
headings = [row.text.strip() for row in table.find_all('th')[1:34]]
rows = table.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
cols = [col.replace('*', '').replace('#', '') for col in cols] # Removes '*' and '#' from some names
cols = [col for col in cols if 'Totals' not in col and 'NL teams' not in col and 'AL teams' not in col] # Removes Team Totals and other rows
cols.insert(2, season)
data.append([ele for ele in cols[0:]])
headings.insert(2, "Year")
data = pd.DataFrame(data=data, columns=headings) # [:-5] # -5 to remove Team Totals and other rows (didn't work in multi-year queries)
data = data.dropna() # Removes Row of All Nones
return data
|
jldbc/pybaseball | pybaseball/team_pitching.py | team_pitching_bref | python | def team_pitching_bref(team, start_season, end_season=None):
if start_season is None:
raise ValueError("You need to provide at least one season to collect data for. Try team_pitching_bref(season) or team_pitching_bref(start_season, end_season).")
if end_season is None:
end_season = start_season
url = "https://www.baseball-reference.com/teams/{}".format(team)
data = []
headings = None
for season in range(start_season, end_season+1):
print("Getting Pitching Data: {} {}".format(season, team))
stats_url = "{}/{}.shtml".format(url, season)
response = requests.get(stats_url)
soup = BeautifulSoup(response.content, 'html.parser')
table = soup.find_all('table', {'id': 'team_pitching'})[0]
if headings is None:
headings = [row.text.strip() for row in table.find_all('th')[1:34]]
rows = table.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
cols = [col.replace('*', '').replace('#', '') for col in cols] # Removes '*' and '#' from some names
cols = [col for col in cols if 'Totals' not in col and 'NL teams' not in col and 'AL teams' not in col] # Removes Team Totals and other rows
cols.insert(2, season)
data.append([ele for ele in cols[0:]])
headings.insert(2, "Year")
data = pd.DataFrame(data=data, columns=headings) # [:-5] # -5 to remove Team Totals and other rows (didn't work in multi-year queries)
data = data.dropna() # Removes Row of All Nones
return data | Get season-level Pitching Statistics for Specific Team (from Baseball-Reference)
ARGUMENTS:
team : str : The Team Abbreviation (i.e. 'NYY' for Yankees) of the Team you want data for
start_season : int : first season you want data for (or the only season if you do not specify an end_season)
end_season : int : final season you want data for | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/team_pitching.py#L79-L121 | null | import numpy as np
import pandas as pd
from bs4 import BeautifulSoup
import requests
def get_soup(start_season, end_season, league, ind):
url = "http://www.fangraphs.com/leaders.aspx?pos=all&stats=pit&lg={}&qual=0&type=c,4,5,11,7,8,13,-1,24,36,37,40,43,44,48,51,-1,6,45,62,-1,59&season={}&month=0&season1={}&ind={}&team=0,ts&rost=0&age=0&filter=&players=0&page=1_100000"
url = url.format(league, end_season, start_season, ind)
s=requests.get(url).content
#print(s)
return BeautifulSoup(s, "lxml")
def get_table(soup, ind):
#doesn't work yet
tables = soup.find_all('table')
table = tables[11]
data = []
# couldn't find these in the table, hardcoding for now
if ind == 0:
headings = ["Team", "W","L","SV","G","GS","IP","SO","K/9","BB/9","HR/9","BABIP","LOB%","GB%","HR/FB","ERA","FIP","xFIP","WAR"]
else:
headings = ["Season","Team","W","L","SV","G","GS","IP","SO","K/9","BB/9","HR/9","BABIP","LOB%","GB%","HR/FB","ERA","FIP","xFIP","WAR"]
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols[1:]])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def postprocessing(data):
# fill missing values with NaN
data.replace(r'^\s*$', np.nan, regex=True, inplace = True)
data.replace(r'^null$', np.nan, regex=True, inplace = True)
# convert percent strings to float values
percentages = ['GB%', 'HR/FB', 'LOB%']
for col in percentages:
# skip if column is all NA (happens for some of the more obscure stats + in older seasons)
if data[col].count()>0:
data[col] = data[col].str.strip(' %')
data[col] = data[col].str.strip('%')
data[col] = data[col].astype(float)/100.
else:
#print(col)
pass
# convert columns to numeric
not_numeric = ['Team']
numeric_cols = [col for col in data.columns if col not in not_numeric]
data[numeric_cols] = data[numeric_cols].astype(float)
return data
def team_pitching(start_season, end_season=None, league='all', ind=1):
"""
Get season-level pitching data aggregated by team.
ARGUMENTS:
start_season : int : first season you want data for (or the only season if you do not specify an end_season)
end_season : int : final season you want data for
league : "all", "nl", or "al"
ind : int : =1 if you want individual season level data, =0 if you want a team'ss aggreagate data over all seasons in the query
"""
if start_season is None:
raise ValueError("You need to provide at least one season to collect data for. Try team_pitching(season) or team_pitching(start_season, end_season).")
if end_season is None:
end_season = start_season
soup = get_soup(start_season=start_season, end_season=end_season, league=league, ind=ind)
table = get_table(soup, ind)
table = postprocessing(table)
return table
|
jldbc/pybaseball | pybaseball/statcast_batter.py | statcast_batter | python | def statcast_batter(start_dt=None, end_dt=None, player_id=None):
start_dt, end_dt, player_id = sanitize_input(start_dt, end_dt, player_id)
# inputs are valid if either both or zero dates are supplied. Not valid of only one given.
if start_dt and end_dt:
url = 'https://baseballsavant.mlb.com/statcast_search/csv?all=true&hfPT=&hfAB=&hfBBT=&hfPR=&hfZ=&stadium=&hfBBL=&hfNewZones=&hfGT=R%7CPO%7CS%7C=&hfSea=&hfSit=&player_type=batter&hfOuts=&opponent=&pitcher_throws=&batter_stands=&hfSA=&game_date_gt={}&game_date_lt={}&batters_lookup%5B%5D={}&team=&position=&hfRO=&home_road=&hfFlag=&metric_1=&hfInn=&min_pitches=0&min_results=0&group_by=name&sort_col=pitches&player_event_sort=h_launch_speed&sort_order=desc&min_abs=0&type=details&'
df = split_request(start_dt, end_dt, player_id, url)
return df | Pulls statcast pitch-level data from Baseball Savant for a given batter.
ARGUMENTS
start_dt : YYYY-MM-DD : the first date for which you want a player's statcast data
end_dt : YYYY-MM-DD : the final date for which you want data
player_id : INT : the player's MLBAM ID. Find this by calling pybaseball.playerid_lookup(last_name, first_name), finding the correct player, and selecting their key_mlbam. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/statcast_batter.py#L4-L18 | [
"def sanitize_input(start_dt, end_dt, player_id):\n # error if no player ID provided\n if player_id is None:\n raise ValueError(\"Player ID is required. If you need to find a player's id, try pybaseball.playerid_lookup(last_name, first_name) and use their key_mlbam. If you want statcast data for all pl... | from pybaseball.utils import sanitize_input, split_request
|
jldbc/pybaseball | pybaseball/playerid_lookup.py | playerid_reverse_lookup | python | def playerid_reverse_lookup(player_ids, key_type=None):
key_types = ('mlbam', 'retro', 'bbref', 'fangraphs', )
if not key_type:
key_type = key_types[0] # default is "mlbam" if key_type not provided
elif key_type not in key_types:
raise ValueError(
'[Key Type: {}] Invalid; Key Type must be one of "{}"'.format(key_type, '", "'.join(key_types))
)
table = get_lookup_table()
key = 'key_{}'.format(key_type)
results = table[table[key].isin(player_ids)]
results = results.reset_index().drop('index', 1)
return results | Retrieve a table of player information given a list of player ids
:param player_ids: list of player ids
:type player_ids: list
:param key_type: name of the key type being looked up (one of "mlbam", "retro", "bbref", or "fangraphs")
:type key_type: str
:rtype: :class:`pandas.core.frame.DataFrame` | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/playerid_lookup.py#L46-L70 | [
"def get_lookup_table():\n print('Gathering player lookup table. This may take a moment.')\n url = \"https://raw.githubusercontent.com/chadwickbureau/register/master/data/people.csv\"\n s=requests.get(url).content\n table = pd.read_csv(io.StringIO(s.decode('utf-8')), dtype={'key_sr_nfl': object, 'key_sr... | import pandas as pd
import requests
import io
# dropped key_uuid. looks like a has we wouldn't need for anything.
# TODO: allow for typos. String similarity?
# TODO: allow user to submit list of multiple names
def get_lookup_table():
print('Gathering player lookup table. This may take a moment.')
url = "https://raw.githubusercontent.com/chadwickbureau/register/master/data/people.csv"
s=requests.get(url).content
table = pd.read_csv(io.StringIO(s.decode('utf-8')), dtype={'key_sr_nfl': object, 'key_sr_nba': object, 'key_sr_nhl': object})
#subset columns
cols_to_keep = ['name_last','name_first','key_mlbam', 'key_retro', 'key_bbref', 'key_fangraphs', 'mlb_played_first','mlb_played_last']
table = table[cols_to_keep]
#make these lowercase to avoid capitalization mistakes when searching
table['name_last'] = table['name_last'].str.lower()
table['name_first'] = table['name_first'].str.lower()
# Pandas cannot handle NaNs in integer columns. We need IDs to be ints for successful queries in statcast, etc.
# Workaround: replace ID NaNs with -1, then convert columns to integers. User will have to understand that -1 is not a valid ID.
table[['key_mlbam', 'key_fangraphs']] = table[['key_mlbam', 'key_fangraphs']].fillna(-1)
table[['key_mlbam', 'key_fangraphs']] = table[['key_mlbam', 'key_fangraphs']].astype(int) # originally returned as floats which is wrong
return table
def playerid_lookup(last, first=None):
# force input strings to lowercase
last = last.lower()
if first:
first = first.lower()
table = get_lookup_table()
if first is None:
results = table.loc[table['name_last']==last]
else:
results = table.loc[(table['name_last']==last) & (table['name_first']==first)]
#results[['key_mlbam', 'key_fangraphs', 'mlb_played_first', 'mlb_played_last']] = results[['key_mlbam', 'key_fangraphs', 'mlb_played_first', 'mlb_played_last']].astype(int) # originally returned as floats which is wrong
results = results.reset_index().drop('index', 1)
return results
# data = playerid_lookup('bonilla')
# data = playerid_lookup('bonilla', 'bobby')
|
jldbc/pybaseball | pybaseball/pitching_leaders.py | pitching_stats | python | def pitching_stats(start_season, end_season=None, league='all', qual=1, ind=1):
if start_season is None:
raise ValueError("You need to provide at least one season to collect data for. Try pitching_leaders(season) or pitching_leaders(start_season, end_season).")
if end_season is None:
end_season = start_season
soup = get_soup(start_season=start_season, end_season=end_season, league=league, qual=qual, ind=ind)
table = get_table(soup, ind)
return table | Get season-level pitching data from FanGraphs.
ARGUMENTS:
start_season : int : first season you want data for (or the only season if you do not specify an end_season)
end_season : int : final season you want data for
league : "all", "nl", or "al"
qual: minimum number of pitches thrown to be included in the data (integer). Use the string 'y' for fangraphs default.
ind : int : =1 if you want individual season-level data, =0 if you want a player's aggreagate data over all seasons in the query | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/pitching_leaders.py#L63-L80 | [
"def get_soup(start_season, end_season, league, qual, ind):\n url = \"http://www.fangraphs.com/leaders.aspx?pos=all&stats=pit&lg={}&qual={}&type=c,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,50,51,52,53,54,55,56,57,58,59,60,61... | import pandas as pd
import numpy as np
from bs4 import BeautifulSoup
import requests
def get_soup(start_season, end_season, league, qual, ind):
url = "http://www.fangraphs.com/leaders.aspx?pos=all&stats=pit&lg={}&qual={}&type=c,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127,128,129,130,131,132,133,134,135,136,137,138,139,140,141,142,143,144,145,146,147,148,149,150,151,152,153,154,155,156,157,158,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,261,262,263,264,265,266,267,268,269,270,271,272,273,274,275,276,277,278,279,280,281,282,283,284,285,286,287,288,289,290,291,292,293,294,295,296,297,298,299,-1&season={}&month=0&season1={}&ind={}&team=&rost=&age=&filter=&players=&page=1_100000"
url = url.format(league, qual, end_season, start_season, ind)
s=requests.get(url).content
#print(s)
return BeautifulSoup(s, "lxml")
def get_table(soup, ind):
tables = soup.find_all('table')
table = tables[11]
data = []
# pulls headings from the fangraphs table
headings = []
headingrows = table.find_all('th')
for row in headingrows[1:]:
headings.append(row.text.strip())
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols[1:]])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
# replace emptry strings with NaN
data.replace(r'^\s*$', np.nan, regex=True, inplace = True)
# convert all percent strings to proper percetages
percentages = ['Contact% (pi)', 'Zone% (pi)','Z-Contact% (pi)','O-Contact% (pi)','Swing% (pi)','Z-Swing% (pi)','O-Swing% (pi)','SL% (pi)','SI% (pi)','SB% (pi)','KN% (pi)','FS% (pi)','FC% (pi)','FA% (pi)','CU% (pi)','CS% (pi)','CH% (pi)','TTO%','Hard%','Med%','Soft%','Oppo%','Cent%','Pull%','K-BB%','Zone% (pfx)','Contact% (pfx)','Z-Contact% (pfx)','O-Contact% (pfx)','Swing% (pfx)','Z-Swing% (pfx)','O-Swing% (pfx)','UN% (pfx)','KN% (pfx)','SC% (pfx)','CH% (pfx)','EP% (pfx)','KC% (pfx)','CU% (pfx)','SL% (pfx)','SI% (pfx)','FO% (pfx)','FS% (pfx)','FC% (pfx)','FT% (pfx)','FA% (pfx)','BB%','K%','SwStr%','F-Strike%','Zone%','Contact%','Z-Contact%','O-Contact%','Swing%','Z-Swing%','O-Swing%','XX%','KN%','SF%','CH%','CB%','CT%','SL%','FB%','BUH%','IFH%','HR/FB','IFFB%','GB%','LD%','LOB%', 'XX% (pi)', 'PO%']
for col in percentages:
# skip if column is all NA (happens for some of the more obscure stats + in older seasons)
if data[col].count()>0:
data[col] = data[col].str.strip(' %')
data[col] = data[col].str.strip('%')
data[col] = data[col].astype(float)/100.
else:
#print(col)
pass
#convert everything except name and team to numeric
cols_to_numeric = [col for col in data.columns if col not in ['Name', 'Team', 'Age Rng', 'Dollars']]
data[cols_to_numeric] = data[cols_to_numeric].astype(float)
#sort by WAR and wins so best players float to the top
data = data.sort_values(['WAR', 'W'], ascending=False)
#put WAR at the end because it looks better
cols = data.columns.tolist()
cols.insert(7, cols.pop(cols.index('WAR')))
data = data.reindex(columns= cols)
return data
|
jldbc/pybaseball | pybaseball/statcast.py | large_request | python | def large_request(start_dt,end_dt,d1,d2,step,verbose):
error_counter = 0 # count failed requests. If > X, break
no_success_msg_flag = False # a flag for passing over the success message of requests are failing
print("This is a large query, it may take a moment to complete")
dataframe_list = []
#step = 3 # number of days per mini-query (test this later to see how large I can make this without losing data)
d = d1 + datetime.timedelta(days=step)
while d <= d2: #while intermediate query end_dt <= global query end_dt, keep looping
# dates before 3/15 and after 11/15 will always be offseason
# if these dates are detected, check if the next season is within the user's query
# if yes, fast-forward to the next season to avoid empty requests
# if no, break the loop. all useful data has been pulled.
if ((d.month < 4 and d.day < 15) or (d1.month > 10 and d1.day > 14)):
if d2.year > d.year:
print('Skipping offseason dates')
d1 = d1.replace(month=3,day=15,year=d1.year+1)
d = d1 + datetime.timedelta(days=step+1)
else:
break
start_dt = d1.strftime('%Y-%m-%d')
intermediate_end_dt = d.strftime('%Y-%m-%d')
data = small_request(start_dt,intermediate_end_dt)
# append to list of dataframes if not empty or failed (failed requests have one row saying "Error: Query Timeout")
if data.shape[0] > 1:
dataframe_list.append(data)
# if it failed, retry up to three times
else:
success = 0
while success == 0:
data = small_request(start_dt,intermediate_end_dt)
if data.shape[0] > 1:
dataframe_list.append(data)
success = 1
else:
error_counter += 1
if error_counter > 2:
# this request is probably too large. Cut a day off of this request and make that its own separate request.
# For each, append to dataframe list if successful, skip and print error message if failed
tmp_end = d - datetime.timedelta(days=1)
tmp_end = tmp_end.strftime('%Y-%m-%d')
smaller_data_1 = small_request(start_dt, tmp_end)
smaller_data_2 = small_request(intermediate_end_dt,intermediate_end_dt)
if smaller_data_1.shape[0] > 1:
dataframe_list.append(smaller_data_1)
print("Completed sub-query from {} to {}".format(start_dt,tmp_end))
else:
print("Query unsuccessful for data from {} to {}. Skipping these dates.".format(start_dt,tmp_end))
if smaller_data_2.shape[0] > 1:
dataframe_list.append(smaller_data_2)
print("Completed sub-query from {} to {}".format(intermediate_end_dt,intermediate_end_dt))
else:
print("Query unsuccessful for data from {} to {}. Skipping these dates.".format(intermediate_end_dt,intermediate_end_dt))
no_success_msg_flag = True # flag for passing over the success message since this request failed
error_counter = 0 # reset counter
break
if verbose:
if no_success_msg_flag is False:
print("Completed sub-query from {} to {}".format(start_dt,intermediate_end_dt))
else:
no_success_msg_flag = False # if failed, reset this flag so message will send again next iteration
# increment dates
d1 = d + datetime.timedelta(days=1)
d = d + datetime.timedelta(days=step+1)
# if start date > end date after being incremented, the loop captured each date's data
if d1 > d2:
pass
# if start date <= end date, then there are a few leftover dates to grab data for.
else:
# start_dt from the earlier loop will work, but instead of d we now want the original end_dt
start_dt = d1.strftime('%Y-%m-%d')
data = small_request(start_dt,end_dt)
dataframe_list.append(data)
if verbose:
print("Completed sub-query from {} to {}".format(start_dt,end_dt))
# concatenate all dataframes into final result set
final_data = pd.concat(dataframe_list, axis=0)
return final_data | break start and end date into smaller increments, collecting all data in small chunks and appending all results to a common dataframe
end_dt is the date strings for the final day of the query
d1 and d2 are datetime objects for first and last day of query, for doing date math
a third datetime object (d) will be used to increment over time for the several intermediate queries | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/statcast.py#L46-L134 | [
"def small_request(start_dt,end_dt):\n url = \"https://baseballsavant.mlb.com/statcast_search/csv?all=true&hfPT=&hfAB=&hfBBT=&hfPR=&hfZ=&stadium=&hfBBL=&hfNewZones=&hfGT=R%7CPO%7CS%7C=&hfSea=&hfSit=&player_type=pitcher&hfOuts=&opponent=&pitcher_throws=&batter_stands=&hfSA=&game_date_gt={}&game_date_lt={}&team=&p... | import numpy as np
import pandas as pd
import requests
import datetime
import warnings
import io
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's Statcast data. For a different date range, try get_statcast(start_dt, end_dt).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def single_game_request(game_pk):
url = "https://baseballsavant.mlb.com/statcast_search/csv?all=true&type=details&game_pk={game_pk}".format(game_pk=game_pk)
s=requests.get(url, timeout=None).content
data = pd.read_csv(io.StringIO(s.decode('utf-8')))#, error_bad_lines=False) # skips 'bad lines' breaking scrapes. still testing this.
return data
def small_request(start_dt,end_dt):
url = "https://baseballsavant.mlb.com/statcast_search/csv?all=true&hfPT=&hfAB=&hfBBT=&hfPR=&hfZ=&stadium=&hfBBL=&hfNewZones=&hfGT=R%7CPO%7CS%7C=&hfSea=&hfSit=&player_type=pitcher&hfOuts=&opponent=&pitcher_throws=&batter_stands=&hfSA=&game_date_gt={}&game_date_lt={}&team=&position=&hfRO=&home_road=&hfFlag=&metric_1=&hfInn=&min_pitches=0&min_results=0&group_by=name&sort_col=pitches&player_event_sort=h_launch_speed&sort_order=desc&min_abs=0&type=details&".format(start_dt, end_dt)
s=requests.get(url, timeout=None).content
data = pd.read_csv(io.StringIO(s.decode('utf-8')))#, error_bad_lines=False) # skips 'bad lines' breaking scrapes. still testing this.
return data
def postprocessing(data, team):
#replace empty entries and 'null' strings with np.NaN
data.replace(r'^\s*$', np.nan, regex=True, inplace = True)
data.replace(r'^null$', np.nan, regex=True, inplace = True)
# convert columns to numeric
not_numeric = ['sv_id', 'umpire', 'type', 'inning_topbot', 'bb_type', 'away_team', 'home_team', 'p_throws',
'stand', 'game_type', 'des', 'description', 'events', 'player_name', 'game_date', 'pitch_type', 'pitch_name']
numeric_cols = ['release_speed','release_pos_x','release_pos_z','batter','pitcher','zone','hit_location','balls',
'strikes','game_year','pfx_x','pfx_z','plate_x','plate_z','on_3b','on_2b','on_1b','outs_when_up','inning',
'hc_x','hc_y','fielder_2','vx0','vy0','vz0','ax','ay','az','sz_top','sz_bot',
'hit_distance_sc','launch_speed','launch_angle','effective_speed','release_spin_rate','release_extension',
'game_pk','pitcher.1','fielder_2.1','fielder_3','fielder_4','fielder_5',
'fielder_6','fielder_7','fielder_8','fielder_9','release_pos_y',
'estimated_ba_using_speedangle','estimated_woba_using_speedangle','woba_value','woba_denom','babip_value',
'iso_value','launch_speed_angle','at_bat_number','pitch_number','home_score','away_score','bat_score',
'fld_score','post_away_score','post_home_score','post_bat_score','post_fld_score']
data[numeric_cols] = data[numeric_cols].astype(float)
# convert date col to datetime data type and sort so that this returns in an order that makes sense (by date and game)
data['game_date'] = pd.to_datetime(data['game_date'], format='%Y-%m-%d')
data = data.sort_values(['game_date', 'game_pk', 'at_bat_number', 'pitch_number'], ascending=False)
#select only pitches from a particular team
valid_teams = ['MIN', 'PHI', 'BAL', 'NYY', 'LAD', 'OAK', 'SEA', 'TB', 'MIL', 'MIA',
'KC', 'TEX', 'CHC', 'ATL', 'COL', 'HOU', 'CIN', 'LAA', 'DET', 'TOR',
'PIT', 'NYM', 'CLE', 'CWS', 'STL', 'WSH', 'SF', 'SD', 'BOS','ARI','ANA','WAS']
if(team in valid_teams):
data = data.loc[(data['home_team']==team)|(data['away_team']==team)]
elif(team != None):
raise ValueError('Error: invalid team abbreviation. Valid team names are: {}'.format(valid_teams))
data = data.reset_index()
return data
def statcast(start_dt=None, end_dt=None, team=None, verbose=True):
"""
Pulls statcast play-level data from Baseball Savant for a given date range.
INPUTS:
start_dt: YYYY-MM-DD : the first date for which you want statcast data
end_dt: YYYY-MM-DD : the last date for which you want statcast data
team: optional (defaults to None) : city abbreviation of the team you want data for (e.g. SEA or BOS)
If no arguments are provided, this will return yesterday's statcast data. If one date is provided, it will return that date's statcast data.
"""
start_dt, end_dt = sanitize_input(start_dt, end_dt)
# 3 days or less -> a quick one-shot request. Greater than 3 days -> break it into multiple smaller queries
small_query_threshold = 5
# inputs are valid if either both or zero dates are supplied. Not valid of only one given.
if start_dt and end_dt:
# how many days worth of data are needed?
date_format = "%Y-%m-%d"
d1 = datetime.datetime.strptime(start_dt, date_format)
d2 = datetime.datetime.strptime(end_dt, date_format)
days_in_query = (d2 - d1).days
if days_in_query <= small_query_threshold:
data = small_request(start_dt,end_dt)
else:
data = large_request(start_dt,end_dt,d1,d2,step=small_query_threshold,verbose=verbose)
data = postprocessing(data, team)
return data
def statcast_single_game(game_pk, team=None):
"""
Pulls statcast play-level data from Baseball Savant for a single game,
identified by its MLB game ID (game_pk in statcast data)
INPUTS:
game_pk : 6-digit integer MLB game ID to retrieve
"""
data = single_game_request(game_pk)
data = postprocessing(data, team)
return data
|
jldbc/pybaseball | pybaseball/statcast.py | statcast | python | def statcast(start_dt=None, end_dt=None, team=None, verbose=True):
start_dt, end_dt = sanitize_input(start_dt, end_dt)
# 3 days or less -> a quick one-shot request. Greater than 3 days -> break it into multiple smaller queries
small_query_threshold = 5
# inputs are valid if either both or zero dates are supplied. Not valid of only one given.
if start_dt and end_dt:
# how many days worth of data are needed?
date_format = "%Y-%m-%d"
d1 = datetime.datetime.strptime(start_dt, date_format)
d2 = datetime.datetime.strptime(end_dt, date_format)
days_in_query = (d2 - d1).days
if days_in_query <= small_query_threshold:
data = small_request(start_dt,end_dt)
else:
data = large_request(start_dt,end_dt,d1,d2,step=small_query_threshold,verbose=verbose)
data = postprocessing(data, team)
return data | Pulls statcast play-level data from Baseball Savant for a given date range.
INPUTS:
start_dt: YYYY-MM-DD : the first date for which you want statcast data
end_dt: YYYY-MM-DD : the last date for which you want statcast data
team: optional (defaults to None) : city abbreviation of the team you want data for (e.g. SEA or BOS)
If no arguments are provided, this will return yesterday's statcast data. If one date is provided, it will return that date's statcast data. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/statcast.py#L173-L204 | [
"def sanitize_input(start_dt, end_dt):\n # if no dates are supplied, assume they want yesterday's data\n # send a warning in case they wanted to specify\n if start_dt is None and end_dt is None:\n today = datetime.datetime.today()\n start_dt = (today - datetime.timedelta(1)).strftime(\"%Y-%m-... | import numpy as np
import pandas as pd
import requests
import datetime
import warnings
import io
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's Statcast data. For a different date range, try get_statcast(start_dt, end_dt).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def single_game_request(game_pk):
url = "https://baseballsavant.mlb.com/statcast_search/csv?all=true&type=details&game_pk={game_pk}".format(game_pk=game_pk)
s=requests.get(url, timeout=None).content
data = pd.read_csv(io.StringIO(s.decode('utf-8')))#, error_bad_lines=False) # skips 'bad lines' breaking scrapes. still testing this.
return data
def small_request(start_dt,end_dt):
url = "https://baseballsavant.mlb.com/statcast_search/csv?all=true&hfPT=&hfAB=&hfBBT=&hfPR=&hfZ=&stadium=&hfBBL=&hfNewZones=&hfGT=R%7CPO%7CS%7C=&hfSea=&hfSit=&player_type=pitcher&hfOuts=&opponent=&pitcher_throws=&batter_stands=&hfSA=&game_date_gt={}&game_date_lt={}&team=&position=&hfRO=&home_road=&hfFlag=&metric_1=&hfInn=&min_pitches=0&min_results=0&group_by=name&sort_col=pitches&player_event_sort=h_launch_speed&sort_order=desc&min_abs=0&type=details&".format(start_dt, end_dt)
s=requests.get(url, timeout=None).content
data = pd.read_csv(io.StringIO(s.decode('utf-8')))#, error_bad_lines=False) # skips 'bad lines' breaking scrapes. still testing this.
return data
def large_request(start_dt,end_dt,d1,d2,step,verbose):
"""
break start and end date into smaller increments, collecting all data in small chunks and appending all results to a common dataframe
end_dt is the date strings for the final day of the query
d1 and d2 are datetime objects for first and last day of query, for doing date math
a third datetime object (d) will be used to increment over time for the several intermediate queries
"""
error_counter = 0 # count failed requests. If > X, break
no_success_msg_flag = False # a flag for passing over the success message of requests are failing
print("This is a large query, it may take a moment to complete")
dataframe_list = []
#step = 3 # number of days per mini-query (test this later to see how large I can make this without losing data)
d = d1 + datetime.timedelta(days=step)
while d <= d2: #while intermediate query end_dt <= global query end_dt, keep looping
# dates before 3/15 and after 11/15 will always be offseason
# if these dates are detected, check if the next season is within the user's query
# if yes, fast-forward to the next season to avoid empty requests
# if no, break the loop. all useful data has been pulled.
if ((d.month < 4 and d.day < 15) or (d1.month > 10 and d1.day > 14)):
if d2.year > d.year:
print('Skipping offseason dates')
d1 = d1.replace(month=3,day=15,year=d1.year+1)
d = d1 + datetime.timedelta(days=step+1)
else:
break
start_dt = d1.strftime('%Y-%m-%d')
intermediate_end_dt = d.strftime('%Y-%m-%d')
data = small_request(start_dt,intermediate_end_dt)
# append to list of dataframes if not empty or failed (failed requests have one row saying "Error: Query Timeout")
if data.shape[0] > 1:
dataframe_list.append(data)
# if it failed, retry up to three times
else:
success = 0
while success == 0:
data = small_request(start_dt,intermediate_end_dt)
if data.shape[0] > 1:
dataframe_list.append(data)
success = 1
else:
error_counter += 1
if error_counter > 2:
# this request is probably too large. Cut a day off of this request and make that its own separate request.
# For each, append to dataframe list if successful, skip and print error message if failed
tmp_end = d - datetime.timedelta(days=1)
tmp_end = tmp_end.strftime('%Y-%m-%d')
smaller_data_1 = small_request(start_dt, tmp_end)
smaller_data_2 = small_request(intermediate_end_dt,intermediate_end_dt)
if smaller_data_1.shape[0] > 1:
dataframe_list.append(smaller_data_1)
print("Completed sub-query from {} to {}".format(start_dt,tmp_end))
else:
print("Query unsuccessful for data from {} to {}. Skipping these dates.".format(start_dt,tmp_end))
if smaller_data_2.shape[0] > 1:
dataframe_list.append(smaller_data_2)
print("Completed sub-query from {} to {}".format(intermediate_end_dt,intermediate_end_dt))
else:
print("Query unsuccessful for data from {} to {}. Skipping these dates.".format(intermediate_end_dt,intermediate_end_dt))
no_success_msg_flag = True # flag for passing over the success message since this request failed
error_counter = 0 # reset counter
break
if verbose:
if no_success_msg_flag is False:
print("Completed sub-query from {} to {}".format(start_dt,intermediate_end_dt))
else:
no_success_msg_flag = False # if failed, reset this flag so message will send again next iteration
# increment dates
d1 = d + datetime.timedelta(days=1)
d = d + datetime.timedelta(days=step+1)
# if start date > end date after being incremented, the loop captured each date's data
if d1 > d2:
pass
# if start date <= end date, then there are a few leftover dates to grab data for.
else:
# start_dt from the earlier loop will work, but instead of d we now want the original end_dt
start_dt = d1.strftime('%Y-%m-%d')
data = small_request(start_dt,end_dt)
dataframe_list.append(data)
if verbose:
print("Completed sub-query from {} to {}".format(start_dt,end_dt))
# concatenate all dataframes into final result set
final_data = pd.concat(dataframe_list, axis=0)
return final_data
def postprocessing(data, team):
#replace empty entries and 'null' strings with np.NaN
data.replace(r'^\s*$', np.nan, regex=True, inplace = True)
data.replace(r'^null$', np.nan, regex=True, inplace = True)
# convert columns to numeric
not_numeric = ['sv_id', 'umpire', 'type', 'inning_topbot', 'bb_type', 'away_team', 'home_team', 'p_throws',
'stand', 'game_type', 'des', 'description', 'events', 'player_name', 'game_date', 'pitch_type', 'pitch_name']
numeric_cols = ['release_speed','release_pos_x','release_pos_z','batter','pitcher','zone','hit_location','balls',
'strikes','game_year','pfx_x','pfx_z','plate_x','plate_z','on_3b','on_2b','on_1b','outs_when_up','inning',
'hc_x','hc_y','fielder_2','vx0','vy0','vz0','ax','ay','az','sz_top','sz_bot',
'hit_distance_sc','launch_speed','launch_angle','effective_speed','release_spin_rate','release_extension',
'game_pk','pitcher.1','fielder_2.1','fielder_3','fielder_4','fielder_5',
'fielder_6','fielder_7','fielder_8','fielder_9','release_pos_y',
'estimated_ba_using_speedangle','estimated_woba_using_speedangle','woba_value','woba_denom','babip_value',
'iso_value','launch_speed_angle','at_bat_number','pitch_number','home_score','away_score','bat_score',
'fld_score','post_away_score','post_home_score','post_bat_score','post_fld_score']
data[numeric_cols] = data[numeric_cols].astype(float)
# convert date col to datetime data type and sort so that this returns in an order that makes sense (by date and game)
data['game_date'] = pd.to_datetime(data['game_date'], format='%Y-%m-%d')
data = data.sort_values(['game_date', 'game_pk', 'at_bat_number', 'pitch_number'], ascending=False)
#select only pitches from a particular team
valid_teams = ['MIN', 'PHI', 'BAL', 'NYY', 'LAD', 'OAK', 'SEA', 'TB', 'MIL', 'MIA',
'KC', 'TEX', 'CHC', 'ATL', 'COL', 'HOU', 'CIN', 'LAA', 'DET', 'TOR',
'PIT', 'NYM', 'CLE', 'CWS', 'STL', 'WSH', 'SF', 'SD', 'BOS','ARI','ANA','WAS']
if(team in valid_teams):
data = data.loc[(data['home_team']==team)|(data['away_team']==team)]
elif(team != None):
raise ValueError('Error: invalid team abbreviation. Valid team names are: {}'.format(valid_teams))
data = data.reset_index()
return data
def statcast_single_game(game_pk, team=None):
"""
Pulls statcast play-level data from Baseball Savant for a single game,
identified by its MLB game ID (game_pk in statcast data)
INPUTS:
game_pk : 6-digit integer MLB game ID to retrieve
"""
data = single_game_request(game_pk)
data = postprocessing(data, team)
return data
|
jldbc/pybaseball | pybaseball/statcast.py | statcast_single_game | python | def statcast_single_game(game_pk, team=None):
data = single_game_request(game_pk)
data = postprocessing(data, team)
return data | Pulls statcast play-level data from Baseball Savant for a single game,
identified by its MLB game ID (game_pk in statcast data)
INPUTS:
game_pk : 6-digit integer MLB game ID to retrieve | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/statcast.py#L206-L216 | [
"def single_game_request(game_pk):\n\n url = \"https://baseballsavant.mlb.com/statcast_search/csv?all=true&type=details&game_pk={game_pk}\".format(game_pk=game_pk)\n s=requests.get(url, timeout=None).content\n data = pd.read_csv(io.StringIO(s.decode('utf-8')))#, error_bad_lines=False) # skips 'bad lines' b... | import numpy as np
import pandas as pd
import requests
import datetime
import warnings
import io
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's Statcast data. For a different date range, try get_statcast(start_dt, end_dt).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def single_game_request(game_pk):
url = "https://baseballsavant.mlb.com/statcast_search/csv?all=true&type=details&game_pk={game_pk}".format(game_pk=game_pk)
s=requests.get(url, timeout=None).content
data = pd.read_csv(io.StringIO(s.decode('utf-8')))#, error_bad_lines=False) # skips 'bad lines' breaking scrapes. still testing this.
return data
def small_request(start_dt,end_dt):
url = "https://baseballsavant.mlb.com/statcast_search/csv?all=true&hfPT=&hfAB=&hfBBT=&hfPR=&hfZ=&stadium=&hfBBL=&hfNewZones=&hfGT=R%7CPO%7CS%7C=&hfSea=&hfSit=&player_type=pitcher&hfOuts=&opponent=&pitcher_throws=&batter_stands=&hfSA=&game_date_gt={}&game_date_lt={}&team=&position=&hfRO=&home_road=&hfFlag=&metric_1=&hfInn=&min_pitches=0&min_results=0&group_by=name&sort_col=pitches&player_event_sort=h_launch_speed&sort_order=desc&min_abs=0&type=details&".format(start_dt, end_dt)
s=requests.get(url, timeout=None).content
data = pd.read_csv(io.StringIO(s.decode('utf-8')))#, error_bad_lines=False) # skips 'bad lines' breaking scrapes. still testing this.
return data
def large_request(start_dt,end_dt,d1,d2,step,verbose):
"""
break start and end date into smaller increments, collecting all data in small chunks and appending all results to a common dataframe
end_dt is the date strings for the final day of the query
d1 and d2 are datetime objects for first and last day of query, for doing date math
a third datetime object (d) will be used to increment over time for the several intermediate queries
"""
error_counter = 0 # count failed requests. If > X, break
no_success_msg_flag = False # a flag for passing over the success message of requests are failing
print("This is a large query, it may take a moment to complete")
dataframe_list = []
#step = 3 # number of days per mini-query (test this later to see how large I can make this without losing data)
d = d1 + datetime.timedelta(days=step)
while d <= d2: #while intermediate query end_dt <= global query end_dt, keep looping
# dates before 3/15 and after 11/15 will always be offseason
# if these dates are detected, check if the next season is within the user's query
# if yes, fast-forward to the next season to avoid empty requests
# if no, break the loop. all useful data has been pulled.
if ((d.month < 4 and d.day < 15) or (d1.month > 10 and d1.day > 14)):
if d2.year > d.year:
print('Skipping offseason dates')
d1 = d1.replace(month=3,day=15,year=d1.year+1)
d = d1 + datetime.timedelta(days=step+1)
else:
break
start_dt = d1.strftime('%Y-%m-%d')
intermediate_end_dt = d.strftime('%Y-%m-%d')
data = small_request(start_dt,intermediate_end_dt)
# append to list of dataframes if not empty or failed (failed requests have one row saying "Error: Query Timeout")
if data.shape[0] > 1:
dataframe_list.append(data)
# if it failed, retry up to three times
else:
success = 0
while success == 0:
data = small_request(start_dt,intermediate_end_dt)
if data.shape[0] > 1:
dataframe_list.append(data)
success = 1
else:
error_counter += 1
if error_counter > 2:
# this request is probably too large. Cut a day off of this request and make that its own separate request.
# For each, append to dataframe list if successful, skip and print error message if failed
tmp_end = d - datetime.timedelta(days=1)
tmp_end = tmp_end.strftime('%Y-%m-%d')
smaller_data_1 = small_request(start_dt, tmp_end)
smaller_data_2 = small_request(intermediate_end_dt,intermediate_end_dt)
if smaller_data_1.shape[0] > 1:
dataframe_list.append(smaller_data_1)
print("Completed sub-query from {} to {}".format(start_dt,tmp_end))
else:
print("Query unsuccessful for data from {} to {}. Skipping these dates.".format(start_dt,tmp_end))
if smaller_data_2.shape[0] > 1:
dataframe_list.append(smaller_data_2)
print("Completed sub-query from {} to {}".format(intermediate_end_dt,intermediate_end_dt))
else:
print("Query unsuccessful for data from {} to {}. Skipping these dates.".format(intermediate_end_dt,intermediate_end_dt))
no_success_msg_flag = True # flag for passing over the success message since this request failed
error_counter = 0 # reset counter
break
if verbose:
if no_success_msg_flag is False:
print("Completed sub-query from {} to {}".format(start_dt,intermediate_end_dt))
else:
no_success_msg_flag = False # if failed, reset this flag so message will send again next iteration
# increment dates
d1 = d + datetime.timedelta(days=1)
d = d + datetime.timedelta(days=step+1)
# if start date > end date after being incremented, the loop captured each date's data
if d1 > d2:
pass
# if start date <= end date, then there are a few leftover dates to grab data for.
else:
# start_dt from the earlier loop will work, but instead of d we now want the original end_dt
start_dt = d1.strftime('%Y-%m-%d')
data = small_request(start_dt,end_dt)
dataframe_list.append(data)
if verbose:
print("Completed sub-query from {} to {}".format(start_dt,end_dt))
# concatenate all dataframes into final result set
final_data = pd.concat(dataframe_list, axis=0)
return final_data
def postprocessing(data, team):
#replace empty entries and 'null' strings with np.NaN
data.replace(r'^\s*$', np.nan, regex=True, inplace = True)
data.replace(r'^null$', np.nan, regex=True, inplace = True)
# convert columns to numeric
not_numeric = ['sv_id', 'umpire', 'type', 'inning_topbot', 'bb_type', 'away_team', 'home_team', 'p_throws',
'stand', 'game_type', 'des', 'description', 'events', 'player_name', 'game_date', 'pitch_type', 'pitch_name']
numeric_cols = ['release_speed','release_pos_x','release_pos_z','batter','pitcher','zone','hit_location','balls',
'strikes','game_year','pfx_x','pfx_z','plate_x','plate_z','on_3b','on_2b','on_1b','outs_when_up','inning',
'hc_x','hc_y','fielder_2','vx0','vy0','vz0','ax','ay','az','sz_top','sz_bot',
'hit_distance_sc','launch_speed','launch_angle','effective_speed','release_spin_rate','release_extension',
'game_pk','pitcher.1','fielder_2.1','fielder_3','fielder_4','fielder_5',
'fielder_6','fielder_7','fielder_8','fielder_9','release_pos_y',
'estimated_ba_using_speedangle','estimated_woba_using_speedangle','woba_value','woba_denom','babip_value',
'iso_value','launch_speed_angle','at_bat_number','pitch_number','home_score','away_score','bat_score',
'fld_score','post_away_score','post_home_score','post_bat_score','post_fld_score']
data[numeric_cols] = data[numeric_cols].astype(float)
# convert date col to datetime data type and sort so that this returns in an order that makes sense (by date and game)
data['game_date'] = pd.to_datetime(data['game_date'], format='%Y-%m-%d')
data = data.sort_values(['game_date', 'game_pk', 'at_bat_number', 'pitch_number'], ascending=False)
#select only pitches from a particular team
valid_teams = ['MIN', 'PHI', 'BAL', 'NYY', 'LAD', 'OAK', 'SEA', 'TB', 'MIL', 'MIA',
'KC', 'TEX', 'CHC', 'ATL', 'COL', 'HOU', 'CIN', 'LAA', 'DET', 'TOR',
'PIT', 'NYM', 'CLE', 'CWS', 'STL', 'WSH', 'SF', 'SD', 'BOS','ARI','ANA','WAS']
if(team in valid_teams):
data = data.loc[(data['home_team']==team)|(data['away_team']==team)]
elif(team != None):
raise ValueError('Error: invalid team abbreviation. Valid team names are: {}'.format(valid_teams))
data = data.reset_index()
return data
def statcast(start_dt=None, end_dt=None, team=None, verbose=True):
"""
Pulls statcast play-level data from Baseball Savant for a given date range.
INPUTS:
start_dt: YYYY-MM-DD : the first date for which you want statcast data
end_dt: YYYY-MM-DD : the last date for which you want statcast data
team: optional (defaults to None) : city abbreviation of the team you want data for (e.g. SEA or BOS)
If no arguments are provided, this will return yesterday's statcast data. If one date is provided, it will return that date's statcast data.
"""
start_dt, end_dt = sanitize_input(start_dt, end_dt)
# 3 days or less -> a quick one-shot request. Greater than 3 days -> break it into multiple smaller queries
small_query_threshold = 5
# inputs are valid if either both or zero dates are supplied. Not valid of only one given.
if start_dt and end_dt:
# how many days worth of data are needed?
date_format = "%Y-%m-%d"
d1 = datetime.datetime.strptime(start_dt, date_format)
d2 = datetime.datetime.strptime(end_dt, date_format)
days_in_query = (d2 - d1).days
if days_in_query <= small_query_threshold:
data = small_request(start_dt,end_dt)
else:
data = large_request(start_dt,end_dt,d1,d2,step=small_query_threshold,verbose=verbose)
data = postprocessing(data, team)
return data
|
jldbc/pybaseball | pybaseball/league_pitching_stats.py | pitching_stats_range | python | def pitching_stats_range(start_dt=None, end_dt=None):
# ensure valid date strings, perform necessary processing for query
start_dt, end_dt = sanitize_input(start_dt, end_dt)
if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
# retrieve html from baseball reference
soup = get_soup(start_dt, end_dt)
table = get_table(soup)
table = table.dropna(how='all') # drop if all columns are NA
#fix some strange formatting for percentage columns
table = table.replace('---%', np.nan)
#make sure these are all numeric
for column in ['Age', '#days', 'G', 'GS', 'W', 'L', 'SV', 'IP', 'H',
'R', 'ER', 'BB', 'SO', 'HR', 'HBP', 'ERA', 'AB', '2B',
'3B', 'IBB', 'GDP', 'SF', 'SB', 'CS', 'PO', 'BF', 'Pit',
'WHIP', 'BAbip', 'SO9', 'SO/W']:
table[column] = pd.to_numeric(table[column])
#convert str(xx%) values to float(0.XX) decimal values
for column in ['Str', 'StL', 'StS', 'GB/FB', 'LD', 'PU']:
table[column] = table[column].replace('%','',regex=True).astype('float')/100
table = table.drop('',1)
return table | Get all pitching stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/league_pitching_stats.py#L67-L96 | [
"def sanitize_input(start_dt, end_dt):\n # if no dates are supplied, assume they want yesterday's data\n # send a warning in case they wanted to specify\n if start_dt is None and end_dt is None:\n today = datetime.datetime.today()\n start_dt = (today - datetime.timedelta(1)).strftime(\"%Y-%m-... | import requests
import pandas as pd
import numpy as np
import io
from bs4 import BeautifulSoup
import datetime
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's data. For a different date range, try pitching_stats_range(start_dt, end_dt) or pitching_stats(season).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
#if end date occurs before start date, swap them
if end_dt < start_dt:
temp = start_dt
start_dt = end_dt
end_dt = temp
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def get_soup(start_dt, end_dt):
# get most recent standings if date not specified
if((start_dt is None) or (end_dt is None)):
print('Error: a date range needs to be specified')
return None
url = "http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=p&lastndays=7&dates=fromandto&fromandto={}.{}&level=mlb&franch=&stat=&stat_value=0".format(start_dt, end_dt)
s = requests.get(url).content
return BeautifulSoup(s, "lxml")
def get_table(soup):
table = soup.find_all('table')[0]
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")][1:]
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def pitching_stats_bref(season=None):
"""
Get all pitching stats for a set season. If no argument is supplied, gives stats for
current season to date.
"""
if season is None:
season = datetime.datetime.today().strftime("%Y")
season = str(season)
start_dt = season + '-03-01' #opening day is always late march or early april
end_dt = season + '-11-01' #season is definitely over by November
return(pitching_stats_range(start_dt, end_dt))
def bwar_pitch(return_all=False):
"""
Get data from war_daily_pitch table. Returns WAR, its components, and a few other useful stats.
To get all fields from this table, supply argument return_all=True.
"""
url = "http://www.baseball-reference.com/data/war_daily_pitch.txt"
s = requests.get(url).content
c=pd.read_csv(io.StringIO(s.decode('utf-8')))
if return_all:
return c
else:
cols_to_keep = ['name_common', 'mlb_ID', 'player_ID', 'year_ID', 'team_ID', 'stint_ID', 'lg_ID',
'G', 'GS', 'RA','xRA', 'BIP', 'BIP_perc','salary', 'ERA_plus', 'WAR_rep', 'WAA',
'WAA_adj','WAR']
return c[cols_to_keep]
|
jldbc/pybaseball | pybaseball/league_pitching_stats.py | pitching_stats_bref | python | def pitching_stats_bref(season=None):
if season is None:
season = datetime.datetime.today().strftime("%Y")
season = str(season)
start_dt = season + '-03-01' #opening day is always late march or early april
end_dt = season + '-11-01' #season is definitely over by November
return(pitching_stats_range(start_dt, end_dt)) | Get all pitching stats for a set season. If no argument is supplied, gives stats for
current season to date. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/league_pitching_stats.py#L98-L108 | [
"def pitching_stats_range(start_dt=None, end_dt=None):\n \"\"\"\n Get all pitching stats for a set time range. This can be the past week, the \n month of August, anything. Just supply the start and end date in YYYY-MM-DD \n format. \n \"\"\"\n # ensure valid date strings, perform necessary process... | import requests
import pandas as pd
import numpy as np
import io
from bs4 import BeautifulSoup
import datetime
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's data. For a different date range, try pitching_stats_range(start_dt, end_dt) or pitching_stats(season).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
#if end date occurs before start date, swap them
if end_dt < start_dt:
temp = start_dt
start_dt = end_dt
end_dt = temp
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def get_soup(start_dt, end_dt):
# get most recent standings if date not specified
if((start_dt is None) or (end_dt is None)):
print('Error: a date range needs to be specified')
return None
url = "http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=p&lastndays=7&dates=fromandto&fromandto={}.{}&level=mlb&franch=&stat=&stat_value=0".format(start_dt, end_dt)
s = requests.get(url).content
return BeautifulSoup(s, "lxml")
def get_table(soup):
table = soup.find_all('table')[0]
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")][1:]
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def pitching_stats_range(start_dt=None, end_dt=None):
"""
Get all pitching stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format.
"""
# ensure valid date strings, perform necessary processing for query
start_dt, end_dt = sanitize_input(start_dt, end_dt)
if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
# retrieve html from baseball reference
soup = get_soup(start_dt, end_dt)
table = get_table(soup)
table = table.dropna(how='all') # drop if all columns are NA
#fix some strange formatting for percentage columns
table = table.replace('---%', np.nan)
#make sure these are all numeric
for column in ['Age', '#days', 'G', 'GS', 'W', 'L', 'SV', 'IP', 'H',
'R', 'ER', 'BB', 'SO', 'HR', 'HBP', 'ERA', 'AB', '2B',
'3B', 'IBB', 'GDP', 'SF', 'SB', 'CS', 'PO', 'BF', 'Pit',
'WHIP', 'BAbip', 'SO9', 'SO/W']:
table[column] = pd.to_numeric(table[column])
#convert str(xx%) values to float(0.XX) decimal values
for column in ['Str', 'StL', 'StS', 'GB/FB', 'LD', 'PU']:
table[column] = table[column].replace('%','',regex=True).astype('float')/100
table = table.drop('',1)
return table
def bwar_pitch(return_all=False):
"""
Get data from war_daily_pitch table. Returns WAR, its components, and a few other useful stats.
To get all fields from this table, supply argument return_all=True.
"""
url = "http://www.baseball-reference.com/data/war_daily_pitch.txt"
s = requests.get(url).content
c=pd.read_csv(io.StringIO(s.decode('utf-8')))
if return_all:
return c
else:
cols_to_keep = ['name_common', 'mlb_ID', 'player_ID', 'year_ID', 'team_ID', 'stint_ID', 'lg_ID',
'G', 'GS', 'RA','xRA', 'BIP', 'BIP_perc','salary', 'ERA_plus', 'WAR_rep', 'WAA',
'WAA_adj','WAR']
return c[cols_to_keep]
|
jldbc/pybaseball | pybaseball/league_pitching_stats.py | bwar_pitch | python | def bwar_pitch(return_all=False):
url = "http://www.baseball-reference.com/data/war_daily_pitch.txt"
s = requests.get(url).content
c=pd.read_csv(io.StringIO(s.decode('utf-8')))
if return_all:
return c
else:
cols_to_keep = ['name_common', 'mlb_ID', 'player_ID', 'year_ID', 'team_ID', 'stint_ID', 'lg_ID',
'G', 'GS', 'RA','xRA', 'BIP', 'BIP_perc','salary', 'ERA_plus', 'WAR_rep', 'WAA',
'WAA_adj','WAR']
return c[cols_to_keep] | Get data from war_daily_pitch table. Returns WAR, its components, and a few other useful stats.
To get all fields from this table, supply argument return_all=True. | train | https://github.com/jldbc/pybaseball/blob/085ea26bfd1b5f5926d79d4fac985c88278115f2/pybaseball/league_pitching_stats.py#L111-L125 | null | import requests
import pandas as pd
import numpy as np
import io
from bs4 import BeautifulSoup
import datetime
def validate_datestring(date_text):
try:
datetime.datetime.strptime(date_text, '%Y-%m-%d')
except ValueError:
raise ValueError("Incorrect data format, should be YYYY-MM-DD")
def sanitize_input(start_dt, end_dt):
# if no dates are supplied, assume they want yesterday's data
# send a warning in case they wanted to specify
if start_dt is None and end_dt is None:
today = datetime.datetime.today()
start_dt = (today - datetime.timedelta(1)).strftime("%Y-%m-%d")
end_dt = today.strftime("%Y-%m-%d")
print("Warning: no date range supplied. Returning yesterday's data. For a different date range, try pitching_stats_range(start_dt, end_dt) or pitching_stats(season).")
#if only one date is supplied, assume they only want that day's stats
#query in this case is from date 1 to date 1
if start_dt is None:
start_dt = end_dt
if end_dt is None:
end_dt = start_dt
#if end date occurs before start date, swap them
if end_dt < start_dt:
temp = start_dt
start_dt = end_dt
end_dt = temp
# now that both dates are not None, make sure they are valid date strings
validate_datestring(start_dt)
validate_datestring(end_dt)
return start_dt, end_dt
def get_soup(start_dt, end_dt):
# get most recent standings if date not specified
if((start_dt is None) or (end_dt is None)):
print('Error: a date range needs to be specified')
return None
url = "http://www.baseball-reference.com/leagues/daily.cgi?user_team=&bust_cache=&type=p&lastndays=7&dates=fromandto&fromandto={}.{}&level=mlb&franch=&stat=&stat_value=0".format(start_dt, end_dt)
s = requests.get(url).content
return BeautifulSoup(s, "lxml")
def get_table(soup):
table = soup.find_all('table')[0]
data = []
headings = [th.get_text() for th in table.find("tr").find_all("th")][1:]
data.append(headings)
table_body = table.find('tbody')
rows = table_body.find_all('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols])
data = pd.DataFrame(data)
data = data.rename(columns=data.iloc[0])
data = data.reindex(data.index.drop(0))
return data
def pitching_stats_range(start_dt=None, end_dt=None):
"""
Get all pitching stats for a set time range. This can be the past week, the
month of August, anything. Just supply the start and end date in YYYY-MM-DD
format.
"""
# ensure valid date strings, perform necessary processing for query
start_dt, end_dt = sanitize_input(start_dt, end_dt)
if datetime.datetime.strptime(start_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
if datetime.datetime.strptime(end_dt, "%Y-%m-%d").year < 2008:
raise ValueError("Year must be 2008 or later")
# retrieve html from baseball reference
soup = get_soup(start_dt, end_dt)
table = get_table(soup)
table = table.dropna(how='all') # drop if all columns are NA
#fix some strange formatting for percentage columns
table = table.replace('---%', np.nan)
#make sure these are all numeric
for column in ['Age', '#days', 'G', 'GS', 'W', 'L', 'SV', 'IP', 'H',
'R', 'ER', 'BB', 'SO', 'HR', 'HBP', 'ERA', 'AB', '2B',
'3B', 'IBB', 'GDP', 'SF', 'SB', 'CS', 'PO', 'BF', 'Pit',
'WHIP', 'BAbip', 'SO9', 'SO/W']:
table[column] = pd.to_numeric(table[column])
#convert str(xx%) values to float(0.XX) decimal values
for column in ['Str', 'StL', 'StS', 'GB/FB', 'LD', 'PU']:
table[column] = table[column].replace('%','',regex=True).astype('float')/100
table = table.drop('',1)
return table
def pitching_stats_bref(season=None):
"""
Get all pitching stats for a set season. If no argument is supplied, gives stats for
current season to date.
"""
if season is None:
season = datetime.datetime.today().strftime("%Y")
season = str(season)
start_dt = season + '-03-01' #opening day is always late march or early april
end_dt = season + '-11-01' #season is definitely over by November
return(pitching_stats_range(start_dt, end_dt))
|
reincubate/ricloud | ricloud/utils.py | error_message_and_exit | python | def error_message_and_exit(message, error_result):
if message:
error_message(message)
puts(json.dumps(error_result, indent=2))
sys.exit(1) | Prints error messages in blue, the failed task result and quits. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L82-L87 | [
"def error_message(message):\n \"\"\"Prints error type messages in red.\"\"\"\n print_message('\\n' + message, colour='red')\n"
] | from __future__ import print_function
import os
import sys
import time
import json
import shutil
import logging
import requests
from multiprocessing.pool import ThreadPool
from clint.textui import prompt, puts, colored, indent
from .conf import settings, OUTPUT_DIR
from .samples import get_samples
logger = logging.getLogger(__name__)
def get_or_create_filepath(filename, directory=''):
absolute_dir = os.path.join(OUTPUT_DIR, directory)
if not os.path.exists(absolute_dir):
os.makedirs(absolute_dir)
return os.path.join(absolute_dir, filename)
def save_file_stream_to_target_path(file_stream, target_path, base_dir=OUTPUT_DIR):
path = os.path.join(base_dir, target_path)
if len(path) > 250:
raise Exception('Path too long, could not write file to disk.')
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
if os.path.isfile(path):
os.remove(path)
with open(path, 'w') as f:
shutil.copyfileobj(file_stream, f)
return path
def utf8(message):
if isinstance(message, unicode):
message = message.encode('utf-8')
return message
def print_message(message, colour='white'):
"""Prints `message` to the console in the specified `colour`.
Makes sure special characters are encoded properly (for example, emoticons
in device names).
"""
puts(getattr(colored, colour)(utf8(message)))
def info_message(message):
"""Prints info type messages in green."""
print_message(message, colour='green')
def prompt_message(message):
"""Prints prompt type messages in blue."""
print_message('\n' + message, colour='blue')
def pending_message(message):
"""Prints pending type messages in yellow."""
print_message('\n' + message, colour='yellow')
def error_message(message):
"""Prints error type messages in red."""
print_message('\n' + message, colour='red')
def print_prompt_values(values, message=None, sub_attr=None):
"""Prints prompt title and choices with a bit of formatting."""
if message:
prompt_message(message)
for index, entry in enumerate(values):
if sub_attr:
line = '{:2d}: {}'.format(index, getattr(utf8(entry), sub_attr))
else:
line = '{:2d}: {}'.format(index, utf8(entry))
with indent(3):
print_message(line)
def prompt_for_input(message, input_type=None):
"""Prints prompt instruction and does basic input parsing."""
while True:
output = prompt.query(message)
if input_type:
try:
output = input_type(output)
except ValueError:
error_message('Invalid input type')
continue
break
return output
def prompt_for_choice(values, message, input_type=int, output_type=None):
"""Prints prompt with a list of choices to choose from."""
output = None
while not output:
index = prompt_for_input(message, input_type=input_type)
try:
output = utf8(values[index])
except IndexError:
error_message('Selection out of range')
continue
if output_type:
output = output_type(output)
return output
def select_item(items, prompt_instruction, prompt_title, sub_attr=None, output_type=None):
print_prompt_values(items, prompt_title, sub_attr)
return prompt_for_choice(items, prompt_instruction, output_type=output_type)
def select_service(response):
return select_item(
response.services,
'Please select a service index:',
'Authorized services:',
output_type=str
)
def select_samples(response, service_name, payload):
samples = get_samples(service_name)
selected_sample = select_item(
samples,
'Please select a sample application index:',
'Available sample applications:',
sub_attr='display_name'
)
return selected_sample(response, payload)
def profile(timer_text='', to_log=False):
def inner(func):
def wraps(*args, **kwargs):
timer_start = time.time()
ret = func(*args, **kwargs)
timer_end = time.time()
if settings.getboolean('logging', 'time_profile'):
delta = timer_end - timer_start
if to_log:
message = "{text} {delta:.9f}s".format(text=timer_text, delta=delta)
logger.debug(message)
else:
message = "{text} {delta:.2f}s".format(text=timer_text, delta=delta)
puts(colored.magenta(message))
return ret
return wraps
return inner
def append_profile_info(string, info):
return utf8(string) + colored.magenta(" completed in {0:.2f}s".format(info))
def _get_num_threads():
num_threads = int(settings.get('performance', 'object_store_greenlets'))
if not num_threads:
num_threads = 1
return num_threads
def threaded_get(request_instance):
return requests.request(
method='get',
url=request_instance[0],
headers=request_instance[1],
)
def concurrent_get(request_list):
max_threads = _get_num_threads()
number_requests = len(request_list)
if number_requests > 0:
pool = ThreadPool(number_requests) if number_requests <= max_threads else ThreadPool(max_threads)
results = pool.map(threaded_get, request_list)
pool.close()
pool.join()
return results
else:
return []
|
reincubate/ricloud | ricloud/utils.py | print_prompt_values | python | def print_prompt_values(values, message=None, sub_attr=None):
if message:
prompt_message(message)
for index, entry in enumerate(values):
if sub_attr:
line = '{:2d}: {}'.format(index, getattr(utf8(entry), sub_attr))
else:
line = '{:2d}: {}'.format(index, utf8(entry))
with indent(3):
print_message(line) | Prints prompt title and choices with a bit of formatting. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L90-L102 | [
"def utf8(message):\n if isinstance(message, unicode):\n message = message.encode('utf-8')\n return message\n",
"def prompt_message(message):\n \"\"\"Prints prompt type messages in blue.\"\"\"\n print_message('\\n' + message, colour='blue')\n",
"def print_message(message, colour='white'):\n ... | from __future__ import print_function
import os
import sys
import time
import json
import shutil
import logging
import requests
from multiprocessing.pool import ThreadPool
from clint.textui import prompt, puts, colored, indent
from .conf import settings, OUTPUT_DIR
from .samples import get_samples
logger = logging.getLogger(__name__)
def get_or_create_filepath(filename, directory=''):
absolute_dir = os.path.join(OUTPUT_DIR, directory)
if not os.path.exists(absolute_dir):
os.makedirs(absolute_dir)
return os.path.join(absolute_dir, filename)
def save_file_stream_to_target_path(file_stream, target_path, base_dir=OUTPUT_DIR):
path = os.path.join(base_dir, target_path)
if len(path) > 250:
raise Exception('Path too long, could not write file to disk.')
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
if os.path.isfile(path):
os.remove(path)
with open(path, 'w') as f:
shutil.copyfileobj(file_stream, f)
return path
def utf8(message):
if isinstance(message, unicode):
message = message.encode('utf-8')
return message
def print_message(message, colour='white'):
"""Prints `message` to the console in the specified `colour`.
Makes sure special characters are encoded properly (for example, emoticons
in device names).
"""
puts(getattr(colored, colour)(utf8(message)))
def info_message(message):
"""Prints info type messages in green."""
print_message(message, colour='green')
def prompt_message(message):
"""Prints prompt type messages in blue."""
print_message('\n' + message, colour='blue')
def pending_message(message):
"""Prints pending type messages in yellow."""
print_message('\n' + message, colour='yellow')
def error_message(message):
"""Prints error type messages in red."""
print_message('\n' + message, colour='red')
def error_message_and_exit(message, error_result):
"""Prints error messages in blue, the failed task result and quits."""
if message:
error_message(message)
puts(json.dumps(error_result, indent=2))
sys.exit(1)
def prompt_for_input(message, input_type=None):
"""Prints prompt instruction and does basic input parsing."""
while True:
output = prompt.query(message)
if input_type:
try:
output = input_type(output)
except ValueError:
error_message('Invalid input type')
continue
break
return output
def prompt_for_choice(values, message, input_type=int, output_type=None):
"""Prints prompt with a list of choices to choose from."""
output = None
while not output:
index = prompt_for_input(message, input_type=input_type)
try:
output = utf8(values[index])
except IndexError:
error_message('Selection out of range')
continue
if output_type:
output = output_type(output)
return output
def select_item(items, prompt_instruction, prompt_title, sub_attr=None, output_type=None):
print_prompt_values(items, prompt_title, sub_attr)
return prompt_for_choice(items, prompt_instruction, output_type=output_type)
def select_service(response):
return select_item(
response.services,
'Please select a service index:',
'Authorized services:',
output_type=str
)
def select_samples(response, service_name, payload):
samples = get_samples(service_name)
selected_sample = select_item(
samples,
'Please select a sample application index:',
'Available sample applications:',
sub_attr='display_name'
)
return selected_sample(response, payload)
def profile(timer_text='', to_log=False):
def inner(func):
def wraps(*args, **kwargs):
timer_start = time.time()
ret = func(*args, **kwargs)
timer_end = time.time()
if settings.getboolean('logging', 'time_profile'):
delta = timer_end - timer_start
if to_log:
message = "{text} {delta:.9f}s".format(text=timer_text, delta=delta)
logger.debug(message)
else:
message = "{text} {delta:.2f}s".format(text=timer_text, delta=delta)
puts(colored.magenta(message))
return ret
return wraps
return inner
def append_profile_info(string, info):
return utf8(string) + colored.magenta(" completed in {0:.2f}s".format(info))
def _get_num_threads():
num_threads = int(settings.get('performance', 'object_store_greenlets'))
if not num_threads:
num_threads = 1
return num_threads
def threaded_get(request_instance):
return requests.request(
method='get',
url=request_instance[0],
headers=request_instance[1],
)
def concurrent_get(request_list):
max_threads = _get_num_threads()
number_requests = len(request_list)
if number_requests > 0:
pool = ThreadPool(number_requests) if number_requests <= max_threads else ThreadPool(max_threads)
results = pool.map(threaded_get, request_list)
pool.close()
pool.join()
return results
else:
return []
|
reincubate/ricloud | ricloud/utils.py | prompt_for_input | python | def prompt_for_input(message, input_type=None):
while True:
output = prompt.query(message)
if input_type:
try:
output = input_type(output)
except ValueError:
error_message('Invalid input type')
continue
break
return output | Prints prompt instruction and does basic input parsing. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L105-L119 | [
"def error_message(message):\n \"\"\"Prints error type messages in red.\"\"\"\n print_message('\\n' + message, colour='red')\n"
] | from __future__ import print_function
import os
import sys
import time
import json
import shutil
import logging
import requests
from multiprocessing.pool import ThreadPool
from clint.textui import prompt, puts, colored, indent
from .conf import settings, OUTPUT_DIR
from .samples import get_samples
logger = logging.getLogger(__name__)
def get_or_create_filepath(filename, directory=''):
absolute_dir = os.path.join(OUTPUT_DIR, directory)
if not os.path.exists(absolute_dir):
os.makedirs(absolute_dir)
return os.path.join(absolute_dir, filename)
def save_file_stream_to_target_path(file_stream, target_path, base_dir=OUTPUT_DIR):
path = os.path.join(base_dir, target_path)
if len(path) > 250:
raise Exception('Path too long, could not write file to disk.')
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
if os.path.isfile(path):
os.remove(path)
with open(path, 'w') as f:
shutil.copyfileobj(file_stream, f)
return path
def utf8(message):
if isinstance(message, unicode):
message = message.encode('utf-8')
return message
def print_message(message, colour='white'):
"""Prints `message` to the console in the specified `colour`.
Makes sure special characters are encoded properly (for example, emoticons
in device names).
"""
puts(getattr(colored, colour)(utf8(message)))
def info_message(message):
"""Prints info type messages in green."""
print_message(message, colour='green')
def prompt_message(message):
"""Prints prompt type messages in blue."""
print_message('\n' + message, colour='blue')
def pending_message(message):
"""Prints pending type messages in yellow."""
print_message('\n' + message, colour='yellow')
def error_message(message):
"""Prints error type messages in red."""
print_message('\n' + message, colour='red')
def error_message_and_exit(message, error_result):
"""Prints error messages in blue, the failed task result and quits."""
if message:
error_message(message)
puts(json.dumps(error_result, indent=2))
sys.exit(1)
def print_prompt_values(values, message=None, sub_attr=None):
"""Prints prompt title and choices with a bit of formatting."""
if message:
prompt_message(message)
for index, entry in enumerate(values):
if sub_attr:
line = '{:2d}: {}'.format(index, getattr(utf8(entry), sub_attr))
else:
line = '{:2d}: {}'.format(index, utf8(entry))
with indent(3):
print_message(line)
def prompt_for_choice(values, message, input_type=int, output_type=None):
"""Prints prompt with a list of choices to choose from."""
output = None
while not output:
index = prompt_for_input(message, input_type=input_type)
try:
output = utf8(values[index])
except IndexError:
error_message('Selection out of range')
continue
if output_type:
output = output_type(output)
return output
def select_item(items, prompt_instruction, prompt_title, sub_attr=None, output_type=None):
print_prompt_values(items, prompt_title, sub_attr)
return prompt_for_choice(items, prompt_instruction, output_type=output_type)
def select_service(response):
return select_item(
response.services,
'Please select a service index:',
'Authorized services:',
output_type=str
)
def select_samples(response, service_name, payload):
samples = get_samples(service_name)
selected_sample = select_item(
samples,
'Please select a sample application index:',
'Available sample applications:',
sub_attr='display_name'
)
return selected_sample(response, payload)
def profile(timer_text='', to_log=False):
def inner(func):
def wraps(*args, **kwargs):
timer_start = time.time()
ret = func(*args, **kwargs)
timer_end = time.time()
if settings.getboolean('logging', 'time_profile'):
delta = timer_end - timer_start
if to_log:
message = "{text} {delta:.9f}s".format(text=timer_text, delta=delta)
logger.debug(message)
else:
message = "{text} {delta:.2f}s".format(text=timer_text, delta=delta)
puts(colored.magenta(message))
return ret
return wraps
return inner
def append_profile_info(string, info):
return utf8(string) + colored.magenta(" completed in {0:.2f}s".format(info))
def _get_num_threads():
num_threads = int(settings.get('performance', 'object_store_greenlets'))
if not num_threads:
num_threads = 1
return num_threads
def threaded_get(request_instance):
return requests.request(
method='get',
url=request_instance[0],
headers=request_instance[1],
)
def concurrent_get(request_list):
max_threads = _get_num_threads()
number_requests = len(request_list)
if number_requests > 0:
pool = ThreadPool(number_requests) if number_requests <= max_threads else ThreadPool(max_threads)
results = pool.map(threaded_get, request_list)
pool.close()
pool.join()
return results
else:
return []
|
reincubate/ricloud | ricloud/utils.py | prompt_for_choice | python | def prompt_for_choice(values, message, input_type=int, output_type=None):
output = None
while not output:
index = prompt_for_input(message, input_type=input_type)
try:
output = utf8(values[index])
except IndexError:
error_message('Selection out of range')
continue
if output_type:
output = output_type(output)
return output | Prints prompt with a list of choices to choose from. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/utils.py#L122-L137 | [
"def utf8(message):\n if isinstance(message, unicode):\n message = message.encode('utf-8')\n return message\n",
"def error_message(message):\n \"\"\"Prints error type messages in red.\"\"\"\n print_message('\\n' + message, colour='red')\n",
"def prompt_for_input(message, input_type=None):\n ... | from __future__ import print_function
import os
import sys
import time
import json
import shutil
import logging
import requests
from multiprocessing.pool import ThreadPool
from clint.textui import prompt, puts, colored, indent
from .conf import settings, OUTPUT_DIR
from .samples import get_samples
logger = logging.getLogger(__name__)
def get_or_create_filepath(filename, directory=''):
absolute_dir = os.path.join(OUTPUT_DIR, directory)
if not os.path.exists(absolute_dir):
os.makedirs(absolute_dir)
return os.path.join(absolute_dir, filename)
def save_file_stream_to_target_path(file_stream, target_path, base_dir=OUTPUT_DIR):
path = os.path.join(base_dir, target_path)
if len(path) > 250:
raise Exception('Path too long, could not write file to disk.')
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
if os.path.isfile(path):
os.remove(path)
with open(path, 'w') as f:
shutil.copyfileobj(file_stream, f)
return path
def utf8(message):
if isinstance(message, unicode):
message = message.encode('utf-8')
return message
def print_message(message, colour='white'):
"""Prints `message` to the console in the specified `colour`.
Makes sure special characters are encoded properly (for example, emoticons
in device names).
"""
puts(getattr(colored, colour)(utf8(message)))
def info_message(message):
"""Prints info type messages in green."""
print_message(message, colour='green')
def prompt_message(message):
"""Prints prompt type messages in blue."""
print_message('\n' + message, colour='blue')
def pending_message(message):
"""Prints pending type messages in yellow."""
print_message('\n' + message, colour='yellow')
def error_message(message):
"""Prints error type messages in red."""
print_message('\n' + message, colour='red')
def error_message_and_exit(message, error_result):
"""Prints error messages in blue, the failed task result and quits."""
if message:
error_message(message)
puts(json.dumps(error_result, indent=2))
sys.exit(1)
def print_prompt_values(values, message=None, sub_attr=None):
"""Prints prompt title and choices with a bit of formatting."""
if message:
prompt_message(message)
for index, entry in enumerate(values):
if sub_attr:
line = '{:2d}: {}'.format(index, getattr(utf8(entry), sub_attr))
else:
line = '{:2d}: {}'.format(index, utf8(entry))
with indent(3):
print_message(line)
def prompt_for_input(message, input_type=None):
"""Prints prompt instruction and does basic input parsing."""
while True:
output = prompt.query(message)
if input_type:
try:
output = input_type(output)
except ValueError:
error_message('Invalid input type')
continue
break
return output
def select_item(items, prompt_instruction, prompt_title, sub_attr=None, output_type=None):
print_prompt_values(items, prompt_title, sub_attr)
return prompt_for_choice(items, prompt_instruction, output_type=output_type)
def select_service(response):
return select_item(
response.services,
'Please select a service index:',
'Authorized services:',
output_type=str
)
def select_samples(response, service_name, payload):
samples = get_samples(service_name)
selected_sample = select_item(
samples,
'Please select a sample application index:',
'Available sample applications:',
sub_attr='display_name'
)
return selected_sample(response, payload)
def profile(timer_text='', to_log=False):
def inner(func):
def wraps(*args, **kwargs):
timer_start = time.time()
ret = func(*args, **kwargs)
timer_end = time.time()
if settings.getboolean('logging', 'time_profile'):
delta = timer_end - timer_start
if to_log:
message = "{text} {delta:.9f}s".format(text=timer_text, delta=delta)
logger.debug(message)
else:
message = "{text} {delta:.2f}s".format(text=timer_text, delta=delta)
puts(colored.magenta(message))
return ret
return wraps
return inner
def append_profile_info(string, info):
return utf8(string) + colored.magenta(" completed in {0:.2f}s".format(info))
def _get_num_threads():
num_threads = int(settings.get('performance', 'object_store_greenlets'))
if not num_threads:
num_threads = 1
return num_threads
def threaded_get(request_instance):
return requests.request(
method='get',
url=request_instance[0],
headers=request_instance[1],
)
def concurrent_get(request_list):
max_threads = _get_num_threads()
number_requests = len(request_list)
if number_requests > 0:
pool = ThreadPool(number_requests) if number_requests <= max_threads else ThreadPool(max_threads)
results = pool.map(threaded_get, request_list)
pool.close()
pool.join()
return results
else:
return []
|
reincubate/ricloud | ricloud/object_store.py | ObjectStore._retrieve_result | python | def _retrieve_result(endpoints, token_header):
request_list = [
(url, token_header)
for (task_id, url) in endpoints
]
responses = concurrent_get(request_list)
# Quick sanity check
assert len(endpoints) == len(responses)
responses_dic = {
task_id: r.content
for (task_id, _), r in zip(endpoints, responses)
}
return responses_dic | Prepare the request list and execute them concurrently. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/object_store.py#L62-L78 | [
"def concurrent_get(request_list):\n max_threads = _get_num_threads()\n number_requests = len(request_list)\n if number_requests > 0:\n pool = ThreadPool(number_requests) if number_requests <= max_threads else ThreadPool(max_threads)\n results = pool.map(threaded_get, request_list)\n p... | class ObjectStore(object):
def __init__(self, api):
self.api = api
def go(self):
while True:
time.sleep(1)
pending = self._pending_tasks()
if pending:
self._handle_pending_tasks(pending)
time.sleep(5)
def _pending_tasks(self):
return self.api.pending_tasks.keys() if self.api.pending_tasks else []
@staticmethod
def _check_status_integrity(status):
# Check if everything is ok with the status
if 'result_retrieved' not in status or 'status' not in status:
raise InvalidTaskStatus
if status['result_retrieved']:
raise ResultRetrieved
def _get_task_status(self, pending):
return self.api.task_status(pending[:MAX_PENDING])
def _set_result_in_memory(self, uuid, result):
# Make the result accessible to the other thread
self.api.set_task_result(uuid, result)
def _handle_pending_tasks(self, pending):
task_status_response = self._get_task_status(pending)
task_status_response.pop('success')
retrieval_urls = []
for task_id, task_status in task_status_response.iteritems():
if 'retrieval_endpoint' in task_status:
url = (task_id, task_status['retrieval_endpoint'])
retrieval_urls.append(url)
if retrieval_urls:
results = self._retrieve_result(retrieval_urls, self.api.token_header)
for task_id, task_result in results.iteritems():
self._set_result_in_memory(task_id, task_result)
@staticmethod
|
reincubate/ricloud | ricloud/asmaster_api.py | AsmasterApi._build_endpoint | python | def _build_endpoint(self, endpoint_name):
endpoint_relative = settings.get('asmaster_endpoints', endpoint_name)
return '%s%s' % (self.host, endpoint_relative) | Generate an enpoint url from a setting name.
Args:
endpoint_name(str): setting name for the enpoint to build
Returns:
(str) url enpoint | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L33-L43 | null | class AsmasterApi(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self, timeout):
self.timeout = timeout
self.host = settings.get('hosts', 'asmaster_host')
self.token = settings.get('auth', 'token')
self.list_services_endpoint = self._build_endpoint('list_services')
self.list_subscriptions_endpoint = self._build_endpoint('list_subscriptions')
self.subscribe_account_endpoint = self._build_endpoint('subscribe_account')
self.perform_2fa_challenge_endpoint = self._build_endpoint('perform_2fa_challenge')
self.submit_2fa_challenge_endpoint = self._build_endpoint('submit_2fa_challenge')
self.list_devices_endpoint = self._build_endpoint('list_devices')
self.subscribe_device_endpoint = self._build_endpoint('subscribe_device')
self.resubscribe_account_endpoint = self._build_endpoint('resubscribe_account')
self.unsubscribe_device_endpoint = self._build_endpoint('unsubscribe_device')
self.unsubscribe_account_endpoint = self._build_endpoint('unsubscribe_account')
self.reset_subscription_since_endpoint = self._build_endpoint('reset_subscription_since')
self.services = {}
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = info['stream_endpoints']
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self.list_services()
self._set_endpoints(info)
self.retrieval_protocol = None
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def list_services(self):
"""Asks for a list of all services."""
return self._perform_post_request(self.list_services_endpoint, {}, self.token_header)
def list_subscriptions(self, service):
"""Asks for a list of all subscribed accounts and devices, along with their statuses."""
data = {
'service': service,
}
return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header)
def subscribe_account(self, username, password, service):
"""Subscribe an account for a service.
"""
data = {
'service': service,
'username': username,
'password': password,
}
return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header)
def perform_2fa_challenge(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.perform_2fa_challenge_endpoint, data, self.token_header)
def submit_2fa_challenge(self, account_id, code):
data = {
'account_id': account_id,
'code': code,
}
return self._perform_post_request(self.submit_2fa_challenge_endpoint, data, self.token_header)
def resubscribe_account(self, account_id, password):
data = {
'account_id': account_id,
'password': password,
}
return self._perform_post_request(self.resubscribe_account_endpoint, data, self.token_header)
def unsubscribe_account(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.unsubscribe_account_endpoint, data, self.token_header)
def list_devices(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.list_devices_endpoint, data, self.token_header)
def subscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.subscribe_device_endpoint, data, self.token_header)
def unsubscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.unsubscribe_device_endpoint, data, self.token_header)
def reset_subscription_since(self, account_id, datetime_str):
"""Handler for `--reset-subscription-since` command.
Args:
account_id(int): id of the account to reset.
datetime_str(str): string representing the datetime used in the
next poll to retrieve data since.
Returns:
(str) json encoded response.
NOTES:
We don't care about validation here, we demand the responsibility to
the backend.
"""
data = {
'account_id': account_id,
'datetime': datetime_str,
}
return self._perform_post_request(self.reset_subscription_since_endpoint, data, self.token_header)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
"""
try:
data = response.json()
except:
msg = 'Unhandled HTTP %s response, shown truncated below:\n%s...' % (
response.status_code, response.text[:50]
)
raise ValueError(msg)
if not response.ok:
utils.error_message_and_exit(None, data)
if post_request and not data['success']:
raise Exception('Asmaster Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = requests.get(
url,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = requests.post(
url,
data=data,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/asmaster_api.py | AsmasterApi._set_allowed_services_and_actions | python | def _set_allowed_services_and_actions(self, services):
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action | Expect services to be a list of service dictionaries, each with `name` and `actions` keys. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L59-L66 | null | class AsmasterApi(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self, timeout):
self.timeout = timeout
self.host = settings.get('hosts', 'asmaster_host')
self.token = settings.get('auth', 'token')
self.list_services_endpoint = self._build_endpoint('list_services')
self.list_subscriptions_endpoint = self._build_endpoint('list_subscriptions')
self.subscribe_account_endpoint = self._build_endpoint('subscribe_account')
self.perform_2fa_challenge_endpoint = self._build_endpoint('perform_2fa_challenge')
self.submit_2fa_challenge_endpoint = self._build_endpoint('submit_2fa_challenge')
self.list_devices_endpoint = self._build_endpoint('list_devices')
self.subscribe_device_endpoint = self._build_endpoint('subscribe_device')
self.resubscribe_account_endpoint = self._build_endpoint('resubscribe_account')
self.unsubscribe_device_endpoint = self._build_endpoint('unsubscribe_device')
self.unsubscribe_account_endpoint = self._build_endpoint('unsubscribe_account')
self.reset_subscription_since_endpoint = self._build_endpoint('reset_subscription_since')
self.services = {}
def _build_endpoint(self, endpoint_name):
"""Generate an enpoint url from a setting name.
Args:
endpoint_name(str): setting name for the enpoint to build
Returns:
(str) url enpoint
"""
endpoint_relative = settings.get('asmaster_endpoints', endpoint_name)
return '%s%s' % (self.host, endpoint_relative)
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = info['stream_endpoints']
def setup(self):
info = self.list_services()
self._set_endpoints(info)
self.retrieval_protocol = None
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def list_services(self):
"""Asks for a list of all services."""
return self._perform_post_request(self.list_services_endpoint, {}, self.token_header)
def list_subscriptions(self, service):
"""Asks for a list of all subscribed accounts and devices, along with their statuses."""
data = {
'service': service,
}
return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header)
def subscribe_account(self, username, password, service):
"""Subscribe an account for a service.
"""
data = {
'service': service,
'username': username,
'password': password,
}
return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header)
def perform_2fa_challenge(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.perform_2fa_challenge_endpoint, data, self.token_header)
def submit_2fa_challenge(self, account_id, code):
data = {
'account_id': account_id,
'code': code,
}
return self._perform_post_request(self.submit_2fa_challenge_endpoint, data, self.token_header)
def resubscribe_account(self, account_id, password):
data = {
'account_id': account_id,
'password': password,
}
return self._perform_post_request(self.resubscribe_account_endpoint, data, self.token_header)
def unsubscribe_account(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.unsubscribe_account_endpoint, data, self.token_header)
def list_devices(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.list_devices_endpoint, data, self.token_header)
def subscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.subscribe_device_endpoint, data, self.token_header)
def unsubscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.unsubscribe_device_endpoint, data, self.token_header)
def reset_subscription_since(self, account_id, datetime_str):
"""Handler for `--reset-subscription-since` command.
Args:
account_id(int): id of the account to reset.
datetime_str(str): string representing the datetime used in the
next poll to retrieve data since.
Returns:
(str) json encoded response.
NOTES:
We don't care about validation here, we demand the responsibility to
the backend.
"""
data = {
'account_id': account_id,
'datetime': datetime_str,
}
return self._perform_post_request(self.reset_subscription_since_endpoint, data, self.token_header)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
"""
try:
data = response.json()
except:
msg = 'Unhandled HTTP %s response, shown truncated below:\n%s...' % (
response.status_code, response.text[:50]
)
raise ValueError(msg)
if not response.ok:
utils.error_message_and_exit(None, data)
if post_request and not data['success']:
raise Exception('Asmaster Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = requests.get(
url,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = requests.post(
url,
data=data,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/asmaster_api.py | AsmasterApi.list_subscriptions | python | def list_subscriptions(self, service):
data = {
'service': service,
}
return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header) | Asks for a list of all subscribed accounts and devices, along with their statuses. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L84-L89 | [
"def _perform_post_request(self, url, data, headers=None):\n response = requests.post(\n url,\n data=data,\n headers=headers,\n timeout=self.timeout,\n )\n return self._parse_response(response, post_request=True)\n"
] | class AsmasterApi(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self, timeout):
self.timeout = timeout
self.host = settings.get('hosts', 'asmaster_host')
self.token = settings.get('auth', 'token')
self.list_services_endpoint = self._build_endpoint('list_services')
self.list_subscriptions_endpoint = self._build_endpoint('list_subscriptions')
self.subscribe_account_endpoint = self._build_endpoint('subscribe_account')
self.perform_2fa_challenge_endpoint = self._build_endpoint('perform_2fa_challenge')
self.submit_2fa_challenge_endpoint = self._build_endpoint('submit_2fa_challenge')
self.list_devices_endpoint = self._build_endpoint('list_devices')
self.subscribe_device_endpoint = self._build_endpoint('subscribe_device')
self.resubscribe_account_endpoint = self._build_endpoint('resubscribe_account')
self.unsubscribe_device_endpoint = self._build_endpoint('unsubscribe_device')
self.unsubscribe_account_endpoint = self._build_endpoint('unsubscribe_account')
self.reset_subscription_since_endpoint = self._build_endpoint('reset_subscription_since')
self.services = {}
def _build_endpoint(self, endpoint_name):
"""Generate an enpoint url from a setting name.
Args:
endpoint_name(str): setting name for the enpoint to build
Returns:
(str) url enpoint
"""
endpoint_relative = settings.get('asmaster_endpoints', endpoint_name)
return '%s%s' % (self.host, endpoint_relative)
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = info['stream_endpoints']
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self.list_services()
self._set_endpoints(info)
self.retrieval_protocol = None
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def list_services(self):
"""Asks for a list of all services."""
return self._perform_post_request(self.list_services_endpoint, {}, self.token_header)
def subscribe_account(self, username, password, service):
"""Subscribe an account for a service.
"""
data = {
'service': service,
'username': username,
'password': password,
}
return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header)
def perform_2fa_challenge(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.perform_2fa_challenge_endpoint, data, self.token_header)
def submit_2fa_challenge(self, account_id, code):
data = {
'account_id': account_id,
'code': code,
}
return self._perform_post_request(self.submit_2fa_challenge_endpoint, data, self.token_header)
def resubscribe_account(self, account_id, password):
data = {
'account_id': account_id,
'password': password,
}
return self._perform_post_request(self.resubscribe_account_endpoint, data, self.token_header)
def unsubscribe_account(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.unsubscribe_account_endpoint, data, self.token_header)
def list_devices(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.list_devices_endpoint, data, self.token_header)
def subscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.subscribe_device_endpoint, data, self.token_header)
def unsubscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.unsubscribe_device_endpoint, data, self.token_header)
def reset_subscription_since(self, account_id, datetime_str):
"""Handler for `--reset-subscription-since` command.
Args:
account_id(int): id of the account to reset.
datetime_str(str): string representing the datetime used in the
next poll to retrieve data since.
Returns:
(str) json encoded response.
NOTES:
We don't care about validation here, we demand the responsibility to
the backend.
"""
data = {
'account_id': account_id,
'datetime': datetime_str,
}
return self._perform_post_request(self.reset_subscription_since_endpoint, data, self.token_header)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
"""
try:
data = response.json()
except:
msg = 'Unhandled HTTP %s response, shown truncated below:\n%s...' % (
response.status_code, response.text[:50]
)
raise ValueError(msg)
if not response.ok:
utils.error_message_and_exit(None, data)
if post_request and not data['success']:
raise Exception('Asmaster Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = requests.get(
url,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = requests.post(
url,
data=data,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/asmaster_api.py | AsmasterApi.subscribe_account | python | def subscribe_account(self, username, password, service):
data = {
'service': service,
'username': username,
'password': password,
}
return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header) | Subscribe an account for a service. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L91-L100 | [
"def _perform_post_request(self, url, data, headers=None):\n response = requests.post(\n url,\n data=data,\n headers=headers,\n timeout=self.timeout,\n )\n return self._parse_response(response, post_request=True)\n"
] | class AsmasterApi(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self, timeout):
self.timeout = timeout
self.host = settings.get('hosts', 'asmaster_host')
self.token = settings.get('auth', 'token')
self.list_services_endpoint = self._build_endpoint('list_services')
self.list_subscriptions_endpoint = self._build_endpoint('list_subscriptions')
self.subscribe_account_endpoint = self._build_endpoint('subscribe_account')
self.perform_2fa_challenge_endpoint = self._build_endpoint('perform_2fa_challenge')
self.submit_2fa_challenge_endpoint = self._build_endpoint('submit_2fa_challenge')
self.list_devices_endpoint = self._build_endpoint('list_devices')
self.subscribe_device_endpoint = self._build_endpoint('subscribe_device')
self.resubscribe_account_endpoint = self._build_endpoint('resubscribe_account')
self.unsubscribe_device_endpoint = self._build_endpoint('unsubscribe_device')
self.unsubscribe_account_endpoint = self._build_endpoint('unsubscribe_account')
self.reset_subscription_since_endpoint = self._build_endpoint('reset_subscription_since')
self.services = {}
def _build_endpoint(self, endpoint_name):
"""Generate an enpoint url from a setting name.
Args:
endpoint_name(str): setting name for the enpoint to build
Returns:
(str) url enpoint
"""
endpoint_relative = settings.get('asmaster_endpoints', endpoint_name)
return '%s%s' % (self.host, endpoint_relative)
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = info['stream_endpoints']
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self.list_services()
self._set_endpoints(info)
self.retrieval_protocol = None
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def list_services(self):
"""Asks for a list of all services."""
return self._perform_post_request(self.list_services_endpoint, {}, self.token_header)
def list_subscriptions(self, service):
"""Asks for a list of all subscribed accounts and devices, along with their statuses."""
data = {
'service': service,
}
return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header)
def perform_2fa_challenge(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.perform_2fa_challenge_endpoint, data, self.token_header)
def submit_2fa_challenge(self, account_id, code):
data = {
'account_id': account_id,
'code': code,
}
return self._perform_post_request(self.submit_2fa_challenge_endpoint, data, self.token_header)
def resubscribe_account(self, account_id, password):
data = {
'account_id': account_id,
'password': password,
}
return self._perform_post_request(self.resubscribe_account_endpoint, data, self.token_header)
def unsubscribe_account(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.unsubscribe_account_endpoint, data, self.token_header)
def list_devices(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.list_devices_endpoint, data, self.token_header)
def subscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.subscribe_device_endpoint, data, self.token_header)
def unsubscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.unsubscribe_device_endpoint, data, self.token_header)
def reset_subscription_since(self, account_id, datetime_str):
"""Handler for `--reset-subscription-since` command.
Args:
account_id(int): id of the account to reset.
datetime_str(str): string representing the datetime used in the
next poll to retrieve data since.
Returns:
(str) json encoded response.
NOTES:
We don't care about validation here, we demand the responsibility to
the backend.
"""
data = {
'account_id': account_id,
'datetime': datetime_str,
}
return self._perform_post_request(self.reset_subscription_since_endpoint, data, self.token_header)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
"""
try:
data = response.json()
except:
msg = 'Unhandled HTTP %s response, shown truncated below:\n%s...' % (
response.status_code, response.text[:50]
)
raise ValueError(msg)
if not response.ok:
utils.error_message_and_exit(None, data)
if post_request and not data['success']:
raise Exception('Asmaster Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = requests.get(
url,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = requests.post(
url,
data=data,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/asmaster_api.py | AsmasterApi.reset_subscription_since | python | def reset_subscription_since(self, account_id, datetime_str):
data = {
'account_id': account_id,
'datetime': datetime_str,
}
return self._perform_post_request(self.reset_subscription_since_endpoint, data, self.token_header) | Handler for `--reset-subscription-since` command.
Args:
account_id(int): id of the account to reset.
datetime_str(str): string representing the datetime used in the
next poll to retrieve data since.
Returns:
(str) json encoded response.
NOTES:
We don't care about validation here, we demand the responsibility to
the backend. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L154-L173 | [
"def _perform_post_request(self, url, data, headers=None):\n response = requests.post(\n url,\n data=data,\n headers=headers,\n timeout=self.timeout,\n )\n return self._parse_response(response, post_request=True)\n"
] | class AsmasterApi(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self, timeout):
self.timeout = timeout
self.host = settings.get('hosts', 'asmaster_host')
self.token = settings.get('auth', 'token')
self.list_services_endpoint = self._build_endpoint('list_services')
self.list_subscriptions_endpoint = self._build_endpoint('list_subscriptions')
self.subscribe_account_endpoint = self._build_endpoint('subscribe_account')
self.perform_2fa_challenge_endpoint = self._build_endpoint('perform_2fa_challenge')
self.submit_2fa_challenge_endpoint = self._build_endpoint('submit_2fa_challenge')
self.list_devices_endpoint = self._build_endpoint('list_devices')
self.subscribe_device_endpoint = self._build_endpoint('subscribe_device')
self.resubscribe_account_endpoint = self._build_endpoint('resubscribe_account')
self.unsubscribe_device_endpoint = self._build_endpoint('unsubscribe_device')
self.unsubscribe_account_endpoint = self._build_endpoint('unsubscribe_account')
self.reset_subscription_since_endpoint = self._build_endpoint('reset_subscription_since')
self.services = {}
def _build_endpoint(self, endpoint_name):
"""Generate an enpoint url from a setting name.
Args:
endpoint_name(str): setting name for the enpoint to build
Returns:
(str) url enpoint
"""
endpoint_relative = settings.get('asmaster_endpoints', endpoint_name)
return '%s%s' % (self.host, endpoint_relative)
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = info['stream_endpoints']
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self.list_services()
self._set_endpoints(info)
self.retrieval_protocol = None
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def list_services(self):
"""Asks for a list of all services."""
return self._perform_post_request(self.list_services_endpoint, {}, self.token_header)
def list_subscriptions(self, service):
"""Asks for a list of all subscribed accounts and devices, along with their statuses."""
data = {
'service': service,
}
return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header)
def subscribe_account(self, username, password, service):
"""Subscribe an account for a service.
"""
data = {
'service': service,
'username': username,
'password': password,
}
return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header)
def perform_2fa_challenge(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.perform_2fa_challenge_endpoint, data, self.token_header)
def submit_2fa_challenge(self, account_id, code):
data = {
'account_id': account_id,
'code': code,
}
return self._perform_post_request(self.submit_2fa_challenge_endpoint, data, self.token_header)
def resubscribe_account(self, account_id, password):
data = {
'account_id': account_id,
'password': password,
}
return self._perform_post_request(self.resubscribe_account_endpoint, data, self.token_header)
def unsubscribe_account(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.unsubscribe_account_endpoint, data, self.token_header)
def list_devices(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.list_devices_endpoint, data, self.token_header)
def subscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.subscribe_device_endpoint, data, self.token_header)
def unsubscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.unsubscribe_device_endpoint, data, self.token_header)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
"""
try:
data = response.json()
except:
msg = 'Unhandled HTTP %s response, shown truncated below:\n%s...' % (
response.status_code, response.text[:50]
)
raise ValueError(msg)
if not response.ok:
utils.error_message_and_exit(None, data)
if post_request and not data['success']:
raise Exception('Asmaster Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = requests.get(
url,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = requests.post(
url,
data=data,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/asmaster_api.py | AsmasterApi._parse_response | python | def _parse_response(response, post_request=False):
try:
data = response.json()
except:
msg = 'Unhandled HTTP %s response, shown truncated below:\n%s...' % (
response.status_code, response.text[:50]
)
raise ValueError(msg)
if not response.ok:
utils.error_message_and_exit(None, data)
if post_request and not data['success']:
raise Exception('Asmaster Api Error: [%s]' % data['error'])
return data | Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_api.py#L176-L196 | [
"def error_message_and_exit(message, error_result):\n \"\"\"Prints error messages in blue, the failed task result and quits.\"\"\"\n if message:\n error_message(message)\n puts(json.dumps(error_result, indent=2))\n sys.exit(1)\n"
] | class AsmasterApi(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self, timeout):
self.timeout = timeout
self.host = settings.get('hosts', 'asmaster_host')
self.token = settings.get('auth', 'token')
self.list_services_endpoint = self._build_endpoint('list_services')
self.list_subscriptions_endpoint = self._build_endpoint('list_subscriptions')
self.subscribe_account_endpoint = self._build_endpoint('subscribe_account')
self.perform_2fa_challenge_endpoint = self._build_endpoint('perform_2fa_challenge')
self.submit_2fa_challenge_endpoint = self._build_endpoint('submit_2fa_challenge')
self.list_devices_endpoint = self._build_endpoint('list_devices')
self.subscribe_device_endpoint = self._build_endpoint('subscribe_device')
self.resubscribe_account_endpoint = self._build_endpoint('resubscribe_account')
self.unsubscribe_device_endpoint = self._build_endpoint('unsubscribe_device')
self.unsubscribe_account_endpoint = self._build_endpoint('unsubscribe_account')
self.reset_subscription_since_endpoint = self._build_endpoint('reset_subscription_since')
self.services = {}
def _build_endpoint(self, endpoint_name):
"""Generate an enpoint url from a setting name.
Args:
endpoint_name(str): setting name for the enpoint to build
Returns:
(str) url enpoint
"""
endpoint_relative = settings.get('asmaster_endpoints', endpoint_name)
return '%s%s' % (self.host, endpoint_relative)
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = info['stream_endpoints']
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self.list_services()
self._set_endpoints(info)
self.retrieval_protocol = None
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def list_services(self):
"""Asks for a list of all services."""
return self._perform_post_request(self.list_services_endpoint, {}, self.token_header)
def list_subscriptions(self, service):
"""Asks for a list of all subscribed accounts and devices, along with their statuses."""
data = {
'service': service,
}
return self._perform_post_request(self.list_subscriptions_endpoint, data, self.token_header)
def subscribe_account(self, username, password, service):
"""Subscribe an account for a service.
"""
data = {
'service': service,
'username': username,
'password': password,
}
return self._perform_post_request(self.subscribe_account_endpoint, data, self.token_header)
def perform_2fa_challenge(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.perform_2fa_challenge_endpoint, data, self.token_header)
def submit_2fa_challenge(self, account_id, code):
data = {
'account_id': account_id,
'code': code,
}
return self._perform_post_request(self.submit_2fa_challenge_endpoint, data, self.token_header)
def resubscribe_account(self, account_id, password):
data = {
'account_id': account_id,
'password': password,
}
return self._perform_post_request(self.resubscribe_account_endpoint, data, self.token_header)
def unsubscribe_account(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.unsubscribe_account_endpoint, data, self.token_header)
def list_devices(self, account_id):
data = {
'account_id': account_id,
}
return self._perform_post_request(self.list_devices_endpoint, data, self.token_header)
def subscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.subscribe_device_endpoint, data, self.token_header)
def unsubscribe_device(self, account_id, device_id):
data = {
'account_id': account_id,
'device_id': device_id,
}
return self._perform_post_request(self.unsubscribe_device_endpoint, data, self.token_header)
def reset_subscription_since(self, account_id, datetime_str):
"""Handler for `--reset-subscription-since` command.
Args:
account_id(int): id of the account to reset.
datetime_str(str): string representing the datetime used in the
next poll to retrieve data since.
Returns:
(str) json encoded response.
NOTES:
We don't care about validation here, we demand the responsibility to
the backend.
"""
data = {
'account_id': account_id,
'datetime': datetime_str,
}
return self._perform_post_request(self.reset_subscription_since_endpoint, data, self.token_header)
@staticmethod
def _perform_get_request(self, url, headers=None):
response = requests.get(
url,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = requests.post(
url,
data=data,
headers=headers,
timeout=self.timeout,
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/asmaster_listener.py | AsmasterDownloadFileHandler.file_id_to_file_name | python | def file_id_to_file_name(file_id):
if len(file_id) == 40 and re.match("^[a-f0-9]+$", file_id):
return file_id
# prefix with "re_" to avoid name collision with real fileids
return "re_{}".format(hashlib.sha1(file_id).hexdigest()) | Sometimes file ids are not the file names on the device, but are instead generated
by the API. These are not guaranteed to be valid file names so need hashing. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/asmaster_listener.py#L216-L223 | null | class AsmasterDownloadFileHandler(AsmasterHandler):
TYPE = 'download-file'
TABLE = 'file'
QUERY_TEMPLATE = """
INSERT INTO {table}
(`service`, `received`, `account_id`, `device_id`, `device_tag`, `headers`, `location`, `file_id`)
VALUES (
%(service)s, NOW(), %(account_id)s, %(device_id)s,
%(device_tag)s, %(headers)s, %(location)s, %(file_id)s
)
"""
@utils.profile('file handler took', to_log=True)
def on_complete_message(self, header, stream):
task = AsmasterTask(header.get('task_id', 'system'), callback=self.generate_callback())
task.headers = header
task.result = stream
self.api.append_consumed_task(task)
# Tell the `handle` function not to close the stream file.
return True
def generate_callback(self):
@utils.profile('file callback took', to_log=True)
def callback(task):
target_path = self.get_target_path(task.headers)
file_path = utils.save_file_stream_to_target_path(task.result, target_path)
# Close the temp file here as we did not let `handle` do so above.
task.result.close()
file_id = task.headers['file_id']
if len(file_id) > 4096:
raise StreamError("Invalid download file request, file_id is too long")
query = self.QUERY_TEMPLATE.format(table=self.TABLE)
args = {
"service": task.headers['service'],
"account_id": task.headers.get('account_id', None),
"device_id": task.headers.get('device_id', None),
"device_tag": task.headers.get('device_tag', None),
"headers": json.dumps(task.headers),
"location": file_path,
"file_id": file_id,
}
database_handler.handle_query(query, args)
self.api.result_consumed(task.uuid)
return callback
@staticmethod
def get_target_path(headers):
filename = AsmasterDownloadFileHandler.file_id_to_file_name(headers['file_id'])
path = os.path.join(
headers['service'],
str(headers.get('account_id', "None")),
str(headers.get('device_id', "None")),
filename
)
return path
@staticmethod
|
def sync(func):
    """Decorator to make a task synchronous.

    ``func`` must return a task object exposing ``wait_for_result`` and a
    JSON-encoded ``result`` attribute. The wrapper blocks until the task
    completes, then returns the decoded result.
    """
    import functools  # local import: keep the decorator self-contained

    sync_timeout = 3600  # Match standard synchronous timeout.

    @functools.wraps(func)  # preserve the wrapped function's name/docstring
    def wrapper(*args, **kwargs):
        task = func(*args, **kwargs)
        # Block until the backend reports a result (or the timeout fires).
        task.wait_for_result(timeout=sync_timeout)
        return json.loads(task.result)
    return wrapper
class BaseClient(object):
def __init__(self, ricloud_client):
self.ricloud = ricloud_client
def register_account(self, account):
return self.ricloud.api.register_account(
username=account,
service=self.service)
def wait_for_pending_tasks(self):
self.ricloud.api.wait_for_results()
|
def fetch_data(self):
    """Prompt for a data type choice and execute the `fetch_data` task.

    The results are saved to a file in json format.
    """
    # Defensive copy: never mutate the list behind ``available_data``
    # (mirrors the ``list(...)`` copy in SampleICloudApplication.fetch_data).
    choices = list(self.available_data)
    choices.insert(0, 'All')

    selected_data_type = utils.select_item(
        choices,
        'Please select what data to fetch:',
        'Available data:',
    )

    if selected_data_type == 'All':
        selected_data_type = ','.join(self.available_data)

    utils.pending_message('Performing fetch data task...')

    fetch_data_task = self.client.data(
        account=self.account,
        data=selected_data_type,
    )

    # Wait here for result as rest of sample app relies on it.
    fetch_data_task.wait_for_result(timeout=self.timeout)
    fetch_data_result = json.loads(fetch_data_task.result)

    # Write the result to file, named after the task's uuid.
    task_id = fetch_data_task.uuid
    filepath = utils.get_or_create_filepath('%s.json' % task_id)

    with open(filepath, 'w') as out:
        json.dump(fetch_data_result, out, indent=2)

    utils.info_message('Fetch data successful. Output file: %s.json' % task_id)

    return fetch_data_result
The results are saved to a file in json format. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/samples/live_sample.py#L22-L58 | [
"def info_message(message):\n \"\"\"Prints info type messages in green.\"\"\"\n print_message(message, colour='green')\n",
"def pending_message(message):\n \"\"\"Prints pending type messages in yellow.\"\"\"\n print_message('\\n' + message, colour='yellow')\n",
"def select_item(items, prompt_instruc... | class SampleLiveICloudApplication(SampleICloudApplication):
display_name = 'Sample Live Data Application'
def run(self):
# Register the account for the iCloud service.
self.client.register_account(self.account)
# Attempt to login to the account.
self.log_in()
# Choose a data type and retrieve it.
self.fetch_data()
# Wait for any pending tasks to complete
self.client.wait_for_pending_tasks()
utils.info_message('All tasks completed')
@property
def available_data(self):
live_feeds = [
'location',
'mobileme_contacts',
'web_browser_history',
'live_call_history',
'live_photos',
]
return [c for c in self.client.available_data
if c in live_feeds]
|
def log_in(self):
    """Perform the `log_in` task to setup the API session for future data requests."""
    # Prompt interactively when no password was supplied in the payload.
    self.password = self.password or getpass.getpass('Password: ')

    utils.pending_message('Performing login...')
    outcome = self.client.login(account=self.account, password=self.password)

    # A failed login carries an 'error' key; delegate recovery (e.g. 2FA).
    if 'error' in outcome:
        self.handle_failed_login(outcome)

    utils.info_message('Login successful')
"def info_message(message):\n \"\"\"Prints info type messages in green.\"\"\"\n print_message(message, colour='green')\n",
"def pending_message(message):\n \"\"\"Prints pending type messages in yellow.\"\"\"\n print_message('\\n' + message, colour='yellow')\n",
"def handle_failed_login(self, login_r... | class SampleICloudApplication(object):
display_name = 'Sample iCloud Application'
client_name = iCloudClient
def __init__(self, ricloud_client, payload):
self.client = self.client_name(ricloud_client)
self.account = payload.get('account')
self.password = payload.get('password')
self.timeout = payload.get('timeout')
def run(self):
# Register the account for the iCloud service.
self.client.register_account(self.account)
# Attempt to login to the account.
self.log_in()
# Get the accounts device list.
self.get_devices()
# Choose a device to retrieve data from.
self.prompt_devices_list()
# Choose a data type and retrieve it.
data = self.fetch_data()
# Grab all photos and/or attachments to download
files = self.get_file_ids_to_download(data)
# Send all the the resquests to download the files
if files:
self.download_files(files)
# Wait for any pending tasks to complete
self.client.wait_for_pending_tasks()
utils.info_message('All tasks completed')
@utils.profile('Login completed in')
def handle_failed_login(self, login_result):
"""If Two Factor Authentication (2FA/2SV) is enabled, the initial
login will fail with a predictable error. Catching this error allows us
to begin the authentication process.
Other types of errors can be treated in a similar way.
"""
error_code = login_result.get('error')
if '2fa-required' in error_code:
utils.error_message('Login Failed: 2FA or 2SV is active!')
self.trigger_two_step_login(login_result)
self.finish_two_step_login()
else:
utils.error_message_and_exit('\nLogin Failed', login_result)
def trigger_two_step_login(self, login_result):
utils.info_message('Starting 2FA/2SV authentication process.')
devices = login_result['data']['trustedDevices']
selected_trusted_device = utils.select_item(
devices,
prompt_instruction='Please select a device index:',
prompt_title='This account has 2FA/2SV enabled.\nAn authorization '
'code will be sent to the selected devices.',
output_type=str
)
start_two_fa_result = self.client.start_2fa_auth(
account=self.account,
challenge=selected_trusted_device
)
if 'error' in start_two_fa_result:
utils.error_message_and_exit('2FA Failed', start_two_fa_result)
def finish_two_step_login(self):
utils.info_message('Challenge has been submitted.')
submit_code = utils.prompt_for_input('\nPlease enter the received code:')
utils.pending_message('Sending code...')
finish_two_fa_result = self.client.finish_2fa_auth(
account=self.account,
code=submit_code
)
if 'error' in finish_two_fa_result:
utils.error_message_and_exit('2FA Failed! Wrong code?', finish_two_fa_result)
@utils.profile('Devices retrieved in')
def get_devices(self):
"""Execute the `get_devices` task and store the results in `self.devices`."""
utils.pending_message('Fetching device list...')
get_devices_task = self.client.devices(
account=self.account
)
# We wait for device list info as this sample relies on it next.
get_devices_task.wait_for_result(timeout=self.timeout)
get_devices_result = json.loads(get_devices_task.result)
self.devices = get_devices_result['devices']
utils.info_message('Get devices successful')
def prompt_devices_list(self):
utils.prompt_message('Available devices:')
for index, (device_id, device_info) in enumerate(self.devices.iteritems()):
line = u"{index:2d}: {device_name} ({colour} {name} running iOS {ios_version})"
line = line.format(index=index, **device_info)
with indent(3):
utils.print_message(line)
self.device_id = utils.prompt_for_choice(
self.devices.keys(),
message='Please select a device index:'
)
@utils.profile('Fetch data completed in')
def fetch_data(self):
"""Prompt for a data type choice and execute the `fetch_data` task.
The results are saved to a file in json format.
"""
choices = list(self.client.available_data)
choices.insert(0, 'All')
selected_data_type = utils.select_item(
choices,
'Please select what data to fetch:',
'Available data:',
)
if selected_data_type == 'All':
selected_data_type = ','.join(self.client.available_data)
utils.pending_message('Performing fetch data task...')
fetch_data_task = self.client.data(
account=self.account,
device=self.device_id,
data=selected_data_type,
)
# Wait here for result as rest of sample app relies on it.
fetch_data_task.wait_for_result(timeout=self.timeout)
fetch_data_result = json.loads(fetch_data_task.result)
# Write the result to file.
task_id = fetch_data_task.uuid
filepath = utils.get_or_create_filepath('%s.json' % task_id)
with open(filepath, 'w') as out:
json.dump(fetch_data_result, out, indent=2)
utils.info_message('Fetch data successful. Output file: %s.json' % task_id)
return fetch_data_result
def download_files(self, files):
"""This method uses the `download_file` task to retrieve binary files
such as attachments, images and videos.
Notice that this method does not wait for the tasks it creates to return
a result synchronously.
"""
utils.pending_message(
"Downloading {nfiles} file{plural}...".format(
nfiles=len(files),
plural='s' if len(files) > 1 else ''
))
for file in files:
if 'file_id' not in file:
continue
def build_callback(file):
"""Callback to save a download file result to a file on disk."""
def file_callback(task):
device_name = self.devices[self.device_id]['device_name']
path_chunks = file['file_path'].split('/')
directory = os.path.join('files', device_name, *path_chunks[:-1])
filepath = utils.get_or_create_filepath(file['filename'], directory)
with open(filepath, 'wb') as out:
out.write(task.result)
if settings.getboolean('logging', 'time_profile'):
filepath = utils.append_profile_info(filepath, task.timer)
with indent(4):
utils.print_message(filepath)
return file_callback
self.client.download_file(
account=self.account,
device=self.device_id,
file=file['file_id'],
callback=build_callback(file)
)
@staticmethod
def get_file_ids_to_download(data):
files = []
for data_type, feed in data.iteritems():
if data_type == 'photos':
files += feed
else:
for feed_entry in feed:
if 'attachments' in feed_entry:
files += feed_entry['attachments'] or []
return files
|
def handle_failed_login(self, login_result):
    """If Two Factor Authentication (2FA/2SV) is enabled, the initial
    login will fail with a predictable error. Catching this error allows us
    to begin the authentication process.

    Other types of errors can be treated in a similar way.
    """
    error_code = login_result.get('error')

    # Anything other than the known 2FA/2SV challenge is fatal here;
    # error_message_and_exit terminates the process.
    if '2fa-required' not in error_code:
        utils.error_message_and_exit('\nLogin Failed', login_result)

    utils.error_message('Login Failed: 2FA or 2SV is active!')
    self.trigger_two_step_login(login_result)
    self.finish_two_step_login()
login will fail with a predictable error. Catching this error allows us
to begin the authentication process.
Other types of errors can be treated in a similar way. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/samples/icloud_sample.py#L61-L74 | [
"def error_message_and_exit(message, error_result):\n \"\"\"Prints error messages in blue, the failed task result and quits.\"\"\"\n if message:\n error_message(message)\n puts(json.dumps(error_result, indent=2))\n sys.exit(1)\n",
"def error_message(message):\n \"\"\"Prints error type messag... | class SampleICloudApplication(object):
display_name = 'Sample iCloud Application'
client_name = iCloudClient
def __init__(self, ricloud_client, payload):
self.client = self.client_name(ricloud_client)
self.account = payload.get('account')
self.password = payload.get('password')
self.timeout = payload.get('timeout')
def run(self):
# Register the account for the iCloud service.
self.client.register_account(self.account)
# Attempt to login to the account.
self.log_in()
# Get the accounts device list.
self.get_devices()
# Choose a device to retrieve data from.
self.prompt_devices_list()
# Choose a data type and retrieve it.
data = self.fetch_data()
# Grab all photos and/or attachments to download
files = self.get_file_ids_to_download(data)
# Send all the the resquests to download the files
if files:
self.download_files(files)
# Wait for any pending tasks to complete
self.client.wait_for_pending_tasks()
utils.info_message('All tasks completed')
@utils.profile('Login completed in')
def log_in(self):
"""Perform the `log_in` task to setup the API session for future data requests."""
if not self.password:
# Password wasn't give, ask for it now
self.password = getpass.getpass('Password: ')
utils.pending_message('Performing login...')
login_result = self.client.login(
account=self.account,
password=self.password
)
if 'error' in login_result:
self.handle_failed_login(login_result)
utils.info_message('Login successful')
def trigger_two_step_login(self, login_result):
utils.info_message('Starting 2FA/2SV authentication process.')
devices = login_result['data']['trustedDevices']
selected_trusted_device = utils.select_item(
devices,
prompt_instruction='Please select a device index:',
prompt_title='This account has 2FA/2SV enabled.\nAn authorization '
'code will be sent to the selected devices.',
output_type=str
)
start_two_fa_result = self.client.start_2fa_auth(
account=self.account,
challenge=selected_trusted_device
)
if 'error' in start_two_fa_result:
utils.error_message_and_exit('2FA Failed', start_two_fa_result)
def finish_two_step_login(self):
utils.info_message('Challenge has been submitted.')
submit_code = utils.prompt_for_input('\nPlease enter the received code:')
utils.pending_message('Sending code...')
finish_two_fa_result = self.client.finish_2fa_auth(
account=self.account,
code=submit_code
)
if 'error' in finish_two_fa_result:
utils.error_message_and_exit('2FA Failed! Wrong code?', finish_two_fa_result)
@utils.profile('Devices retrieved in')
def get_devices(self):
"""Execute the `get_devices` task and store the results in `self.devices`."""
utils.pending_message('Fetching device list...')
get_devices_task = self.client.devices(
account=self.account
)
# We wait for device list info as this sample relies on it next.
get_devices_task.wait_for_result(timeout=self.timeout)
get_devices_result = json.loads(get_devices_task.result)
self.devices = get_devices_result['devices']
utils.info_message('Get devices successful')
def prompt_devices_list(self):
utils.prompt_message('Available devices:')
for index, (device_id, device_info) in enumerate(self.devices.iteritems()):
line = u"{index:2d}: {device_name} ({colour} {name} running iOS {ios_version})"
line = line.format(index=index, **device_info)
with indent(3):
utils.print_message(line)
self.device_id = utils.prompt_for_choice(
self.devices.keys(),
message='Please select a device index:'
)
@utils.profile('Fetch data completed in')
def fetch_data(self):
"""Prompt for a data type choice and execute the `fetch_data` task.
The results are saved to a file in json format.
"""
choices = list(self.client.available_data)
choices.insert(0, 'All')
selected_data_type = utils.select_item(
choices,
'Please select what data to fetch:',
'Available data:',
)
if selected_data_type == 'All':
selected_data_type = ','.join(self.client.available_data)
utils.pending_message('Performing fetch data task...')
fetch_data_task = self.client.data(
account=self.account,
device=self.device_id,
data=selected_data_type,
)
# Wait here for result as rest of sample app relies on it.
fetch_data_task.wait_for_result(timeout=self.timeout)
fetch_data_result = json.loads(fetch_data_task.result)
# Write the result to file.
task_id = fetch_data_task.uuid
filepath = utils.get_or_create_filepath('%s.json' % task_id)
with open(filepath, 'w') as out:
json.dump(fetch_data_result, out, indent=2)
utils.info_message('Fetch data successful. Output file: %s.json' % task_id)
return fetch_data_result
def download_files(self, files):
"""This method uses the `download_file` task to retrieve binary files
such as attachments, images and videos.
Notice that this method does not wait for the tasks it creates to return
a result synchronously.
"""
utils.pending_message(
"Downloading {nfiles} file{plural}...".format(
nfiles=len(files),
plural='s' if len(files) > 1 else ''
))
for file in files:
if 'file_id' not in file:
continue
def build_callback(file):
"""Callback to save a download file result to a file on disk."""
def file_callback(task):
device_name = self.devices[self.device_id]['device_name']
path_chunks = file['file_path'].split('/')
directory = os.path.join('files', device_name, *path_chunks[:-1])
filepath = utils.get_or_create_filepath(file['filename'], directory)
with open(filepath, 'wb') as out:
out.write(task.result)
if settings.getboolean('logging', 'time_profile'):
filepath = utils.append_profile_info(filepath, task.timer)
with indent(4):
utils.print_message(filepath)
return file_callback
self.client.download_file(
account=self.account,
device=self.device_id,
file=file['file_id'],
callback=build_callback(file)
)
@staticmethod
def get_file_ids_to_download(data):
files = []
for data_type, feed in data.iteritems():
if data_type == 'photos':
files += feed
else:
for feed_entry in feed:
if 'attachments' in feed_entry:
files += feed_entry['attachments'] or []
return files
|
def get_devices(self):
    """Execute the `get_devices` task and store the results in `self.devices`."""
    utils.pending_message('Fetching device list...')

    task = self.client.devices(account=self.account)

    # We wait for device list info as this sample relies on it next.
    task.wait_for_result(timeout=self.timeout)

    self.devices = json.loads(task.result)['devices']
    utils.info_message('Get devices successful')
"def info_message(message):\n \"\"\"Prints info type messages in green.\"\"\"\n print_message(message, colour='green')\n",
"def pending_message(message):\n \"\"\"Prints pending type messages in yellow.\"\"\"\n print_message('\\n' + message, colour='yellow')\n"
] | class SampleICloudApplication(object):
display_name = 'Sample iCloud Application'
client_name = iCloudClient
def __init__(self, ricloud_client, payload):
self.client = self.client_name(ricloud_client)
self.account = payload.get('account')
self.password = payload.get('password')
self.timeout = payload.get('timeout')
def run(self):
# Register the account for the iCloud service.
self.client.register_account(self.account)
# Attempt to login to the account.
self.log_in()
# Get the accounts device list.
self.get_devices()
# Choose a device to retrieve data from.
self.prompt_devices_list()
# Choose a data type and retrieve it.
data = self.fetch_data()
# Grab all photos and/or attachments to download
files = self.get_file_ids_to_download(data)
# Send all the the resquests to download the files
if files:
self.download_files(files)
# Wait for any pending tasks to complete
self.client.wait_for_pending_tasks()
utils.info_message('All tasks completed')
@utils.profile('Login completed in')
def log_in(self):
"""Perform the `log_in` task to setup the API session for future data requests."""
if not self.password:
# Password wasn't give, ask for it now
self.password = getpass.getpass('Password: ')
utils.pending_message('Performing login...')
login_result = self.client.login(
account=self.account,
password=self.password
)
if 'error' in login_result:
self.handle_failed_login(login_result)
utils.info_message('Login successful')
def handle_failed_login(self, login_result):
"""If Two Factor Authentication (2FA/2SV) is enabled, the initial
login will fail with a predictable error. Catching this error allows us
to begin the authentication process.
Other types of errors can be treated in a similar way.
"""
error_code = login_result.get('error')
if '2fa-required' in error_code:
utils.error_message('Login Failed: 2FA or 2SV is active!')
self.trigger_two_step_login(login_result)
self.finish_two_step_login()
else:
utils.error_message_and_exit('\nLogin Failed', login_result)
def trigger_two_step_login(self, login_result):
utils.info_message('Starting 2FA/2SV authentication process.')
devices = login_result['data']['trustedDevices']
selected_trusted_device = utils.select_item(
devices,
prompt_instruction='Please select a device index:',
prompt_title='This account has 2FA/2SV enabled.\nAn authorization '
'code will be sent to the selected devices.',
output_type=str
)
start_two_fa_result = self.client.start_2fa_auth(
account=self.account,
challenge=selected_trusted_device
)
if 'error' in start_two_fa_result:
utils.error_message_and_exit('2FA Failed', start_two_fa_result)
def finish_two_step_login(self):
utils.info_message('Challenge has been submitted.')
submit_code = utils.prompt_for_input('\nPlease enter the received code:')
utils.pending_message('Sending code...')
finish_two_fa_result = self.client.finish_2fa_auth(
account=self.account,
code=submit_code
)
if 'error' in finish_two_fa_result:
utils.error_message_and_exit('2FA Failed! Wrong code?', finish_two_fa_result)
@utils.profile('Devices retrieved in')
def prompt_devices_list(self):
utils.prompt_message('Available devices:')
for index, (device_id, device_info) in enumerate(self.devices.iteritems()):
line = u"{index:2d}: {device_name} ({colour} {name} running iOS {ios_version})"
line = line.format(index=index, **device_info)
with indent(3):
utils.print_message(line)
self.device_id = utils.prompt_for_choice(
self.devices.keys(),
message='Please select a device index:'
)
@utils.profile('Fetch data completed in')
def fetch_data(self):
"""Prompt for a data type choice and execute the `fetch_data` task.
The results are saved to a file in json format.
"""
choices = list(self.client.available_data)
choices.insert(0, 'All')
selected_data_type = utils.select_item(
choices,
'Please select what data to fetch:',
'Available data:',
)
if selected_data_type == 'All':
selected_data_type = ','.join(self.client.available_data)
utils.pending_message('Performing fetch data task...')
fetch_data_task = self.client.data(
account=self.account,
device=self.device_id,
data=selected_data_type,
)
# Wait here for result as rest of sample app relies on it.
fetch_data_task.wait_for_result(timeout=self.timeout)
fetch_data_result = json.loads(fetch_data_task.result)
# Write the result to file.
task_id = fetch_data_task.uuid
filepath = utils.get_or_create_filepath('%s.json' % task_id)
with open(filepath, 'w') as out:
json.dump(fetch_data_result, out, indent=2)
utils.info_message('Fetch data successful. Output file: %s.json' % task_id)
return fetch_data_result
def download_files(self, files):
"""This method uses the `download_file` task to retrieve binary files
such as attachments, images and videos.
Notice that this method does not wait for the tasks it creates to return
a result synchronously.
"""
utils.pending_message(
"Downloading {nfiles} file{plural}...".format(
nfiles=len(files),
plural='s' if len(files) > 1 else ''
))
for file in files:
if 'file_id' not in file:
continue
def build_callback(file):
"""Callback to save a download file result to a file on disk."""
def file_callback(task):
device_name = self.devices[self.device_id]['device_name']
path_chunks = file['file_path'].split('/')
directory = os.path.join('files', device_name, *path_chunks[:-1])
filepath = utils.get_or_create_filepath(file['filename'], directory)
with open(filepath, 'wb') as out:
out.write(task.result)
if settings.getboolean('logging', 'time_profile'):
filepath = utils.append_profile_info(filepath, task.timer)
with indent(4):
utils.print_message(filepath)
return file_callback
self.client.download_file(
account=self.account,
device=self.device_id,
file=file['file_id'],
callback=build_callback(file)
)
@staticmethod
def get_file_ids_to_download(data):
files = []
for data_type, feed in data.iteritems():
if data_type == 'photos':
files += feed
else:
for feed_entry in feed:
if 'attachments' in feed_entry:
files += feed_entry['attachments'] or []
return files
|
def download_files(self, files):
    """This method uses the `download_file` task to retrieve binary files
    such as attachments, images and videos.

    Notice that this method does not wait for the tasks it creates to return
    a result synchronously.
    """
    utils.pending_message(
        "Downloading {nfiles} file{plural}...".format(
            nfiles=len(files),
            plural='s' if len(files) > 1 else ''
        ))

    # Defined once, outside the loop (the original re-created this factory
    # on every iteration). Binding ``file`` through the parameter keeps the
    # late-binding-closure pitfall at bay.
    def build_callback(file):
        """Callback to save a download file result to a file on disk."""
        def file_callback(task):
            device_name = self.devices[self.device_id]['device_name']
            path_chunks = file['file_path'].split('/')
            directory = os.path.join('files', device_name, *path_chunks[:-1])

            filepath = utils.get_or_create_filepath(file['filename'], directory)
            with open(filepath, 'wb') as out:
                out.write(task.result)

            if settings.getboolean('logging', 'time_profile'):
                filepath = utils.append_profile_info(filepath, task.timer)

            with indent(4):
                utils.print_message(filepath)
        return file_callback

    for file in files:
        # Entries without a file_id cannot be downloaded; skip them.
        if 'file_id' not in file:
            continue

        self.client.download_file(
            account=self.account,
            device=self.device_id,
            file=file['file_id'],
            callback=build_callback(file)
        )
such as attachments, images and videos.
Notice that this method does not wait for the tasks it creates to return
a result synchronously. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/samples/icloud_sample.py#L183-L226 | [
"def pending_message(message):\n \"\"\"Prints pending type messages in yellow.\"\"\"\n print_message('\\n' + message, colour='yellow')\n",
"def build_callback(file):\n \"\"\"Callback to save a download file result to a file on disk.\"\"\"\n def file_callback(task):\n device_name = self.devices[... | class SampleICloudApplication(object):
display_name = 'Sample iCloud Application'
client_name = iCloudClient
def __init__(self, ricloud_client, payload):
self.client = self.client_name(ricloud_client)
self.account = payload.get('account')
self.password = payload.get('password')
self.timeout = payload.get('timeout')
def run(self):
# Register the account for the iCloud service.
self.client.register_account(self.account)
# Attempt to login to the account.
self.log_in()
# Get the accounts device list.
self.get_devices()
# Choose a device to retrieve data from.
self.prompt_devices_list()
# Choose a data type and retrieve it.
data = self.fetch_data()
# Grab all photos and/or attachments to download
files = self.get_file_ids_to_download(data)
# Send all the the resquests to download the files
if files:
self.download_files(files)
# Wait for any pending tasks to complete
self.client.wait_for_pending_tasks()
utils.info_message('All tasks completed')
@utils.profile('Login completed in')
def log_in(self):
"""Perform the `log_in` task to setup the API session for future data requests."""
if not self.password:
# Password wasn't give, ask for it now
self.password = getpass.getpass('Password: ')
utils.pending_message('Performing login...')
login_result = self.client.login(
account=self.account,
password=self.password
)
if 'error' in login_result:
self.handle_failed_login(login_result)
utils.info_message('Login successful')
def handle_failed_login(self, login_result):
"""If Two Factor Authentication (2FA/2SV) is enabled, the initial
login will fail with a predictable error. Catching this error allows us
to begin the authentication process.
Other types of errors can be treated in a similar way.
"""
error_code = login_result.get('error')
if '2fa-required' in error_code:
utils.error_message('Login Failed: 2FA or 2SV is active!')
self.trigger_two_step_login(login_result)
self.finish_two_step_login()
else:
utils.error_message_and_exit('\nLogin Failed', login_result)
def trigger_two_step_login(self, login_result):
utils.info_message('Starting 2FA/2SV authentication process.')
devices = login_result['data']['trustedDevices']
selected_trusted_device = utils.select_item(
devices,
prompt_instruction='Please select a device index:',
prompt_title='This account has 2FA/2SV enabled.\nAn authorization '
'code will be sent to the selected devices.',
output_type=str
)
start_two_fa_result = self.client.start_2fa_auth(
account=self.account,
challenge=selected_trusted_device
)
if 'error' in start_two_fa_result:
utils.error_message_and_exit('2FA Failed', start_two_fa_result)
def finish_two_step_login(self):
utils.info_message('Challenge has been submitted.')
submit_code = utils.prompt_for_input('\nPlease enter the received code:')
utils.pending_message('Sending code...')
finish_two_fa_result = self.client.finish_2fa_auth(
account=self.account,
code=submit_code
)
if 'error' in finish_two_fa_result:
utils.error_message_and_exit('2FA Failed! Wrong code?', finish_two_fa_result)
@utils.profile('Devices retrieved in')
def get_devices(self):
"""Execute the `get_devices` task and store the results in `self.devices`."""
utils.pending_message('Fetching device list...')
get_devices_task = self.client.devices(
account=self.account
)
# We wait for device list info as this sample relies on it next.
get_devices_task.wait_for_result(timeout=self.timeout)
get_devices_result = json.loads(get_devices_task.result)
self.devices = get_devices_result['devices']
utils.info_message('Get devices successful')
def prompt_devices_list(self):
utils.prompt_message('Available devices:')
for index, (device_id, device_info) in enumerate(self.devices.iteritems()):
line = u"{index:2d}: {device_name} ({colour} {name} running iOS {ios_version})"
line = line.format(index=index, **device_info)
with indent(3):
utils.print_message(line)
self.device_id = utils.prompt_for_choice(
self.devices.keys(),
message='Please select a device index:'
)
@utils.profile('Fetch data completed in')
def fetch_data(self):
"""Prompt for a data type choice and execute the `fetch_data` task.
The results are saved to a file in json format.
"""
choices = list(self.client.available_data)
choices.insert(0, 'All')
selected_data_type = utils.select_item(
choices,
'Please select what data to fetch:',
'Available data:',
)
if selected_data_type == 'All':
selected_data_type = ','.join(self.client.available_data)
utils.pending_message('Performing fetch data task...')
fetch_data_task = self.client.data(
account=self.account,
device=self.device_id,
data=selected_data_type,
)
# Wait here for result as rest of sample app relies on it.
fetch_data_task.wait_for_result(timeout=self.timeout)
fetch_data_result = json.loads(fetch_data_task.result)
# Write the result to file.
task_id = fetch_data_task.uuid
filepath = utils.get_or_create_filepath('%s.json' % task_id)
with open(filepath, 'w') as out:
json.dump(fetch_data_result, out, indent=2)
utils.info_message('Fetch data successful. Output file: %s.json' % task_id)
return fetch_data_result
@staticmethod
def get_file_ids_to_download(data):
files = []
for data_type, feed in data.iteritems():
if data_type == 'photos':
files += feed
else:
for feed_entry in feed:
if 'attachments' in feed_entry:
files += feed_entry['attachments'] or []
return files
|
def register_account(self, username, service):
    """Register an account against a service.

    The account that we're querying must be referenced during any
    future task requests - so we know which account to link the task
    to.
    """
    payload = {
        'service': service,
        'username': username,
    }
    return self._perform_post_request(
        self.register_account_endpoint, payload, self.token_header)
The account that we're querying must be referenced during any
future task requests - so we know which account to link the task
too. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L79-L90 | [
"def _perform_post_request(self, url, data, headers=None):\n response = self.session.post(\n url,\n data=data,\n headers=headers\n )\n\n return self._parse_response(response, post_request=True)\n"
] | class Api(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self):
self.host = settings.get('hosts', 'api_host')
self.token = settings.get('auth', 'token')
self.account_info_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'account_information'))
self.register_account_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'register_account'))
self.task_status_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'task_status'))
self.results_consumed_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'result_consumed'))
self._pending_tasks = {}
self.services = {}
self.session = requests.Session()
@property
def pending_tasks(self):
return self._pending_tasks
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
def _get_info(self):
"""Fetch account information from ASApi host."""
return self._perform_get_request(self.account_info_endpoint, headers=self.token_header)
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = [self._parse_endpoint(endpoint) for endpoint in info['stream_endpoints']]
submission = info['task_submission_endpoint']
self.submit_endpoint = self._parse_endpoint(submission)
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self._get_info()
self._set_endpoints(info)
self.retrieval_protocol = info['retrieval_protocol']
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def perform_task(self, service, task_name, account, payload, callback=None):
"""Submit a task to the API.
The task is executed asyncronously, and a Task object is returned.
"""
data = {
'service': service,
'action': task_name,
'account': account,
}
data.update(payload)
response = self._perform_post_request(self.submit_endpoint, data, self.token_header)
task = Task(uuid=response['task_id'], callback=callback)
self._pending_tasks[task.uuid] = task
return task
def task_status(self, task_id):
"""Find the status of a task."""
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.task_status_endpoint, data, self.token_header)
def result_consumed(self, task_id):
"""Report the result as successfully consumed."""
logger.debug('Sending result consumed message.')
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header)
def wait_for_results(self):
while self._pending_tasks:
time.sleep(0.1)
def set_task_result(self, task_id, result):
if task_id not in self._pending_tasks:
time.sleep(1)
try:
self._pending_tasks[task_id].result = result
self._pending_tasks.pop(task_id)
self.result_consumed(task_id)
except KeyError:
pass
_consumed_tasks = []
def append_consumed_task(self, task):
self._consumed_tasks.append(task)
def process_results(self):
if self._consumed_tasks:
consumed_task = self._consumed_tasks.pop(0)
consumed_task.trigger_callback()
else:
time.sleep(0.01)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
TODO: Streamline error checking.
"""
data = response.json()
if not response.ok:
utils.error_message_and_exit('Push Api Error:', data)
if post_request and not data['success']:
raise Exception('Push Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = self.session.get(
url,
headers=headers
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = self.session.post(
url,
data=data,
headers=headers
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/api.py | Api.perform_task | python | def perform_task(self, service, task_name, account, payload, callback=None):
data = {
'service': service,
'action': task_name,
'account': account,
}
data.update(payload)
response = self._perform_post_request(self.submit_endpoint, data, self.token_header)
task = Task(uuid=response['task_id'], callback=callback)
self._pending_tasks[task.uuid] = task
return task | Submit a task to the API.
The task is executed asyncronously, and a Task object is returned. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L92-L108 | [
"def _perform_post_request(self, url, data, headers=None):\n response = self.session.post(\n url,\n data=data,\n headers=headers\n )\n\n return self._parse_response(response, post_request=True)\n"
] | class Api(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self):
self.host = settings.get('hosts', 'api_host')
self.token = settings.get('auth', 'token')
self.account_info_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'account_information'))
self.register_account_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'register_account'))
self.task_status_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'task_status'))
self.results_consumed_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'result_consumed'))
self._pending_tasks = {}
self.services = {}
self.session = requests.Session()
@property
def pending_tasks(self):
return self._pending_tasks
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
def _get_info(self):
"""Fetch account information from ASApi host."""
return self._perform_get_request(self.account_info_endpoint, headers=self.token_header)
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = [self._parse_endpoint(endpoint) for endpoint in info['stream_endpoints']]
submission = info['task_submission_endpoint']
self.submit_endpoint = self._parse_endpoint(submission)
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self._get_info()
self._set_endpoints(info)
self.retrieval_protocol = info['retrieval_protocol']
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def register_account(self, username, service):
"""Register an account against a service.
The account that we're querying must be referenced during any
future task requests - so we know which account to link the task
too.
"""
data = {
'service': service,
'username': username,
}
return self._perform_post_request(self.register_account_endpoint, data, self.token_header)
def task_status(self, task_id):
"""Find the status of a task."""
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.task_status_endpoint, data, self.token_header)
def result_consumed(self, task_id):
"""Report the result as successfully consumed."""
logger.debug('Sending result consumed message.')
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header)
def wait_for_results(self):
while self._pending_tasks:
time.sleep(0.1)
def set_task_result(self, task_id, result):
if task_id not in self._pending_tasks:
time.sleep(1)
try:
self._pending_tasks[task_id].result = result
self._pending_tasks.pop(task_id)
self.result_consumed(task_id)
except KeyError:
pass
_consumed_tasks = []
def append_consumed_task(self, task):
self._consumed_tasks.append(task)
def process_results(self):
if self._consumed_tasks:
consumed_task = self._consumed_tasks.pop(0)
consumed_task.trigger_callback()
else:
time.sleep(0.01)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
TODO: Streamline error checking.
"""
data = response.json()
if not response.ok:
utils.error_message_and_exit('Push Api Error:', data)
if post_request and not data['success']:
raise Exception('Push Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = self.session.get(
url,
headers=headers
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = self.session.post(
url,
data=data,
headers=headers
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/api.py | Api.task_status | python | def task_status(self, task_id):
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.task_status_endpoint, data, self.token_header) | Find the status of a task. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L110-L115 | [
"def _perform_post_request(self, url, data, headers=None):\n response = self.session.post(\n url,\n data=data,\n headers=headers\n )\n\n return self._parse_response(response, post_request=True)\n"
] | class Api(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self):
self.host = settings.get('hosts', 'api_host')
self.token = settings.get('auth', 'token')
self.account_info_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'account_information'))
self.register_account_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'register_account'))
self.task_status_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'task_status'))
self.results_consumed_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'result_consumed'))
self._pending_tasks = {}
self.services = {}
self.session = requests.Session()
@property
def pending_tasks(self):
return self._pending_tasks
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
def _get_info(self):
"""Fetch account information from ASApi host."""
return self._perform_get_request(self.account_info_endpoint, headers=self.token_header)
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = [self._parse_endpoint(endpoint) for endpoint in info['stream_endpoints']]
submission = info['task_submission_endpoint']
self.submit_endpoint = self._parse_endpoint(submission)
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self._get_info()
self._set_endpoints(info)
self.retrieval_protocol = info['retrieval_protocol']
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def register_account(self, username, service):
"""Register an account against a service.
The account that we're querying must be referenced during any
future task requests - so we know which account to link the task
too.
"""
data = {
'service': service,
'username': username,
}
return self._perform_post_request(self.register_account_endpoint, data, self.token_header)
def perform_task(self, service, task_name, account, payload, callback=None):
"""Submit a task to the API.
The task is executed asyncronously, and a Task object is returned.
"""
data = {
'service': service,
'action': task_name,
'account': account,
}
data.update(payload)
response = self._perform_post_request(self.submit_endpoint, data, self.token_header)
task = Task(uuid=response['task_id'], callback=callback)
self._pending_tasks[task.uuid] = task
return task
def result_consumed(self, task_id):
"""Report the result as successfully consumed."""
logger.debug('Sending result consumed message.')
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header)
def wait_for_results(self):
while self._pending_tasks:
time.sleep(0.1)
def set_task_result(self, task_id, result):
if task_id not in self._pending_tasks:
time.sleep(1)
try:
self._pending_tasks[task_id].result = result
self._pending_tasks.pop(task_id)
self.result_consumed(task_id)
except KeyError:
pass
_consumed_tasks = []
def append_consumed_task(self, task):
self._consumed_tasks.append(task)
def process_results(self):
if self._consumed_tasks:
consumed_task = self._consumed_tasks.pop(0)
consumed_task.trigger_callback()
else:
time.sleep(0.01)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
TODO: Streamline error checking.
"""
data = response.json()
if not response.ok:
utils.error_message_and_exit('Push Api Error:', data)
if post_request and not data['success']:
raise Exception('Push Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = self.session.get(
url,
headers=headers
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = self.session.post(
url,
data=data,
headers=headers
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/api.py | Api.result_consumed | python | def result_consumed(self, task_id):
logger.debug('Sending result consumed message.')
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header) | Report the result as successfully consumed. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L117-L123 | [
"def _perform_post_request(self, url, data, headers=None):\n response = self.session.post(\n url,\n data=data,\n headers=headers\n )\n\n return self._parse_response(response, post_request=True)\n"
] | class Api(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self):
self.host = settings.get('hosts', 'api_host')
self.token = settings.get('auth', 'token')
self.account_info_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'account_information'))
self.register_account_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'register_account'))
self.task_status_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'task_status'))
self.results_consumed_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'result_consumed'))
self._pending_tasks = {}
self.services = {}
self.session = requests.Session()
@property
def pending_tasks(self):
return self._pending_tasks
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
def _get_info(self):
"""Fetch account information from ASApi host."""
return self._perform_get_request(self.account_info_endpoint, headers=self.token_header)
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = [self._parse_endpoint(endpoint) for endpoint in info['stream_endpoints']]
submission = info['task_submission_endpoint']
self.submit_endpoint = self._parse_endpoint(submission)
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self._get_info()
self._set_endpoints(info)
self.retrieval_protocol = info['retrieval_protocol']
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def register_account(self, username, service):
"""Register an account against a service.
The account that we're querying must be referenced during any
future task requests - so we know which account to link the task
too.
"""
data = {
'service': service,
'username': username,
}
return self._perform_post_request(self.register_account_endpoint, data, self.token_header)
def perform_task(self, service, task_name, account, payload, callback=None):
"""Submit a task to the API.
The task is executed asyncronously, and a Task object is returned.
"""
data = {
'service': service,
'action': task_name,
'account': account,
}
data.update(payload)
response = self._perform_post_request(self.submit_endpoint, data, self.token_header)
task = Task(uuid=response['task_id'], callback=callback)
self._pending_tasks[task.uuid] = task
return task
def task_status(self, task_id):
"""Find the status of a task."""
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.task_status_endpoint, data, self.token_header)
def wait_for_results(self):
while self._pending_tasks:
time.sleep(0.1)
def set_task_result(self, task_id, result):
if task_id not in self._pending_tasks:
time.sleep(1)
try:
self._pending_tasks[task_id].result = result
self._pending_tasks.pop(task_id)
self.result_consumed(task_id)
except KeyError:
pass
_consumed_tasks = []
def append_consumed_task(self, task):
self._consumed_tasks.append(task)
def process_results(self):
if self._consumed_tasks:
consumed_task = self._consumed_tasks.pop(0)
consumed_task.trigger_callback()
else:
time.sleep(0.01)
@staticmethod
def _parse_response(response, post_request=False):
"""Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
TODO: Streamline error checking.
"""
data = response.json()
if not response.ok:
utils.error_message_and_exit('Push Api Error:', data)
if post_request and not data['success']:
raise Exception('Push Api Error: [%s]' % data['error'])
return data
def _perform_get_request(self, url, headers=None):
response = self.session.get(
url,
headers=headers
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = self.session.post(
url,
data=data,
headers=headers
)
return self._parse_response(response, post_request=True)
|
reincubate/ricloud | ricloud/api.py | Api._parse_response | python | def _parse_response(response, post_request=False):
data = response.json()
if not response.ok:
utils.error_message_and_exit('Push Api Error:', data)
if post_request and not data['success']:
raise Exception('Push Api Error: [%s]' % data['error'])
return data | Treat the response from ASApi.
The json is dumped before checking the status as even if the response is
not properly formed we are in trouble.
TODO: Streamline error checking. | train | https://github.com/reincubate/ricloud/blob/e46bce4529fbdca34a4190c18c7219e937e2b697/ricloud/api.py#L152-L168 | [
"def error_message_and_exit(message, error_result):\n \"\"\"Prints error messages in blue, the failed task result and quits.\"\"\"\n if message:\n error_message(message)\n puts(json.dumps(error_result, indent=2))\n sys.exit(1)\n"
] | class Api(object):
"""Primary object that pushes requests into a distinct stream thread."""
def __init__(self):
self.host = settings.get('hosts', 'api_host')
self.token = settings.get('auth', 'token')
self.account_info_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'account_information'))
self.register_account_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'register_account'))
self.task_status_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'task_status'))
self.results_consumed_endpoint = '%s%s' % (self.host, settings.get('endpoints', 'result_consumed'))
self._pending_tasks = {}
self.services = {}
self.session = requests.Session()
@property
def pending_tasks(self):
return self._pending_tasks
@property
def token_header(self):
return {
'Authorization': 'Token %s' % self.token,
}
def _get_info(self):
"""Fetch account information from ASApi host."""
return self._perform_get_request(self.account_info_endpoint, headers=self.token_header)
@staticmethod
def _parse_endpoint(endpoint):
"""Expect endpoint to be dictionary containing `protocol`, `host` and `uri` keys."""
return "{protocol}://{host}{uri}".format(**endpoint)
def _set_endpoints(self, info):
self.stream_endpoints = [self._parse_endpoint(endpoint) for endpoint in info['stream_endpoints']]
submission = info['task_submission_endpoint']
self.submit_endpoint = self._parse_endpoint(submission)
def _set_allowed_services_and_actions(self, services):
"""Expect services to be a list of service dictionaries, each with `name` and `actions` keys."""
for service in services:
self.services[service['name']] = {}
for action in service['actions']:
name = action.pop('name')
self.services[service['name']][name] = action
def setup(self):
info = self._get_info()
self._set_endpoints(info)
self.retrieval_protocol = info['retrieval_protocol']
self._set_allowed_services_and_actions(info['services'])
def allowed_services(self):
return self.services.keys()
def allowed_actions(self, service_name):
return self.services[service_name].keys()
def register_account(self, username, service):
"""Register an account against a service.
The account that we're querying must be referenced during any
future task requests - so we know which account to link the task
too.
"""
data = {
'service': service,
'username': username,
}
return self._perform_post_request(self.register_account_endpoint, data, self.token_header)
def perform_task(self, service, task_name, account, payload, callback=None):
"""Submit a task to the API.
The task is executed asyncronously, and a Task object is returned.
"""
data = {
'service': service,
'action': task_name,
'account': account,
}
data.update(payload)
response = self._perform_post_request(self.submit_endpoint, data, self.token_header)
task = Task(uuid=response['task_id'], callback=callback)
self._pending_tasks[task.uuid] = task
return task
def task_status(self, task_id):
"""Find the status of a task."""
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.task_status_endpoint, data, self.token_header)
def result_consumed(self, task_id):
"""Report the result as successfully consumed."""
logger.debug('Sending result consumed message.')
data = {
'task_ids': task_id,
}
return self._perform_post_request(self.results_consumed_endpoint, data, self.token_header)
def wait_for_results(self):
while self._pending_tasks:
time.sleep(0.1)
def set_task_result(self, task_id, result):
if task_id not in self._pending_tasks:
time.sleep(1)
try:
self._pending_tasks[task_id].result = result
self._pending_tasks.pop(task_id)
self.result_consumed(task_id)
except KeyError:
pass
_consumed_tasks = []
def append_consumed_task(self, task):
self._consumed_tasks.append(task)
def process_results(self):
if self._consumed_tasks:
consumed_task = self._consumed_tasks.pop(0)
consumed_task.trigger_callback()
else:
time.sleep(0.01)
@staticmethod
def _perform_get_request(self, url, headers=None):
response = self.session.get(
url,
headers=headers
)
return self._parse_response(response)
def _perform_post_request(self, url, data, headers=None):
response = self.session.post(
url,
data=data,
headers=headers
)
return self._parse_response(response, post_request=True)
|
jmcarp/betfair.py | betfair/utils.py | get_chunks | python | def get_chunks(sequence, chunk_size):
return [
sequence[idx:idx + chunk_size]
for idx in range(0, len(sequence), chunk_size)
] | Split sequence into chunks.
:param list sequence:
:param int chunk_size: | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L19-L28 | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import json
import datetime
import collections
import enum
import decorator
from six.moves import http_client as httplib
from betfair import exceptions
from betfair.meta import utils
from betfair.meta.models import BetfairModel
def get_kwargs(kwargs):
"""Get all keys and values from dictionary where key is not `self`.
:param dict kwargs: Input parameters
"""
return {
key: value for key, value in six.iteritems(kwargs)
if key != 'self'
}
def check_status_code(response, codes=None):
"""Check HTTP status code and raise exception if incorrect.
:param Response response: HTTP response
:param codes: List of accepted codes or callable
:raises: ApiError if code invalid
"""
codes = codes or [httplib.OK]
checker = (
codes
if callable(codes)
else lambda resp: resp.status_code in codes
)
if not checker(response):
raise exceptions.ApiError(response, response.json())
def result_or_error(response):
"""Get `result` field from Betfair response or raise exception if not
found.
:param Response response:
:raises: ApiError if no results passed
"""
data = response.json()
result = data.get('result')
if result is not None:
return result
raise exceptions.ApiError(response, data)
def process_result(result, model=None):
"""Cast response JSON to Betfair model(s).
:param result: Betfair response JSON
:param BetfairModel model: Deserialization format; if `None`, return raw
JSON
"""
if model is None:
return result
if isinstance(result, collections.Sequence):
return [model(**item) for item in result]
return model(**result)
class BetfairEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
if isinstance(o, BetfairModel):
o.validate()
return o.serialize()
if isinstance(o, enum.Enum):
return o.name
return super(BetfairEncoder, self).default(o)
def make_payload(base, method, params):
"""Build Betfair JSON-RPC payload.
:param str base: Betfair base ("Sports" or "Account")
:param str method: Betfair endpoint
:param dict params: Request parameters
"""
payload = {
'jsonrpc': '2.0',
'method': '{base}APING/v1.0/{method}'.format(**locals()),
'params': utils.serialize_dict(params),
'id': 1,
}
return payload
@decorator.decorator
def requires_login(func, *args, **kwargs):
"""Decorator to check that the user is logged in. Raises `BetfairError`
if instance variable `session_token` is absent.
"""
self = args[0]
if self.session_token:
return func(*args, **kwargs)
raise exceptions.NotLoggedIn()
|
jmcarp/betfair.py | betfair/utils.py | get_kwargs | python | def get_kwargs(kwargs):
return {
key: value for key, value in six.iteritems(kwargs)
if key != 'self'
} | Get all keys and values from dictionary where key is not `self`.
:param dict kwargs: Input parameters | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L31-L39 | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import json
import datetime
import collections
import enum
import decorator
from six.moves import http_client as httplib
from betfair import exceptions
from betfair.meta import utils
from betfair.meta.models import BetfairModel
def get_chunks(sequence, chunk_size):
"""Split sequence into chunks.
:param list sequence:
:param int chunk_size:
"""
return [
sequence[idx:idx + chunk_size]
for idx in range(0, len(sequence), chunk_size)
]
def check_status_code(response, codes=None):
"""Check HTTP status code and raise exception if incorrect.
:param Response response: HTTP response
:param codes: List of accepted codes or callable
:raises: ApiError if code invalid
"""
codes = codes or [httplib.OK]
checker = (
codes
if callable(codes)
else lambda resp: resp.status_code in codes
)
if not checker(response):
raise exceptions.ApiError(response, response.json())
def result_or_error(response):
"""Get `result` field from Betfair response or raise exception if not
found.
:param Response response:
:raises: ApiError if no results passed
"""
data = response.json()
result = data.get('result')
if result is not None:
return result
raise exceptions.ApiError(response, data)
def process_result(result, model=None):
"""Cast response JSON to Betfair model(s).
:param result: Betfair response JSON
:param BetfairModel model: Deserialization format; if `None`, return raw
JSON
"""
if model is None:
return result
if isinstance(result, collections.Sequence):
return [model(**item) for item in result]
return model(**result)
class BetfairEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
if isinstance(o, BetfairModel):
o.validate()
return o.serialize()
if isinstance(o, enum.Enum):
return o.name
return super(BetfairEncoder, self).default(o)
def make_payload(base, method, params):
"""Build Betfair JSON-RPC payload.
:param str base: Betfair base ("Sports" or "Account")
:param str method: Betfair endpoint
:param dict params: Request parameters
"""
payload = {
'jsonrpc': '2.0',
'method': '{base}APING/v1.0/{method}'.format(**locals()),
'params': utils.serialize_dict(params),
'id': 1,
}
return payload
@decorator.decorator
def requires_login(func, *args, **kwargs):
"""Decorator to check that the user is logged in. Raises `BetfairError`
if instance variable `session_token` is absent.
"""
self = args[0]
if self.session_token:
return func(*args, **kwargs)
raise exceptions.NotLoggedIn()
|
jmcarp/betfair.py | betfair/utils.py | check_status_code | python | def check_status_code(response, codes=None):
codes = codes or [httplib.OK]
checker = (
codes
if callable(codes)
else lambda resp: resp.status_code in codes
)
if not checker(response):
raise exceptions.ApiError(response, response.json()) | Check HTTP status code and raise exception if incorrect.
:param Response response: HTTP response
:param codes: List of accepted codes or callable
:raises: ApiError if code invalid | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L42-L56 | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import json
import datetime
import collections
import enum
import decorator
from six.moves import http_client as httplib
from betfair import exceptions
from betfair.meta import utils
from betfair.meta.models import BetfairModel
def get_chunks(sequence, chunk_size):
"""Split sequence into chunks.
:param list sequence:
:param int chunk_size:
"""
return [
sequence[idx:idx + chunk_size]
for idx in range(0, len(sequence), chunk_size)
]
def get_kwargs(kwargs):
"""Get all keys and values from dictionary where key is not `self`.
:param dict kwargs: Input parameters
"""
return {
key: value for key, value in six.iteritems(kwargs)
if key != 'self'
}
def result_or_error(response):
"""Get `result` field from Betfair response or raise exception if not
found.
:param Response response:
:raises: ApiError if no results passed
"""
data = response.json()
result = data.get('result')
if result is not None:
return result
raise exceptions.ApiError(response, data)
def process_result(result, model=None):
"""Cast response JSON to Betfair model(s).
:param result: Betfair response JSON
:param BetfairModel model: Deserialization format; if `None`, return raw
JSON
"""
if model is None:
return result
if isinstance(result, collections.Sequence):
return [model(**item) for item in result]
return model(**result)
class BetfairEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
if isinstance(o, BetfairModel):
o.validate()
return o.serialize()
if isinstance(o, enum.Enum):
return o.name
return super(BetfairEncoder, self).default(o)
def make_payload(base, method, params):
"""Build Betfair JSON-RPC payload.
:param str base: Betfair base ("Sports" or "Account")
:param str method: Betfair endpoint
:param dict params: Request parameters
"""
payload = {
'jsonrpc': '2.0',
'method': '{base}APING/v1.0/{method}'.format(**locals()),
'params': utils.serialize_dict(params),
'id': 1,
}
return payload
@decorator.decorator
def requires_login(func, *args, **kwargs):
"""Decorator to check that the user is logged in. Raises `BetfairError`
if instance variable `session_token` is absent.
"""
self = args[0]
if self.session_token:
return func(*args, **kwargs)
raise exceptions.NotLoggedIn()
|
jmcarp/betfair.py | betfair/utils.py | result_or_error | python | def result_or_error(response):
data = response.json()
result = data.get('result')
if result is not None:
return result
raise exceptions.ApiError(response, data) | Get `result` field from Betfair response or raise exception if not
found.
:param Response response:
:raises: ApiError if no results passed | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L59-L70 | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import json
import datetime
import collections
import enum
import decorator
from six.moves import http_client as httplib
from betfair import exceptions
from betfair.meta import utils
from betfair.meta.models import BetfairModel
def get_chunks(sequence, chunk_size):
"""Split sequence into chunks.
:param list sequence:
:param int chunk_size:
"""
return [
sequence[idx:idx + chunk_size]
for idx in range(0, len(sequence), chunk_size)
]
def get_kwargs(kwargs):
"""Get all keys and values from dictionary where key is not `self`.
:param dict kwargs: Input parameters
"""
return {
key: value for key, value in six.iteritems(kwargs)
if key != 'self'
}
def check_status_code(response, codes=None):
"""Check HTTP status code and raise exception if incorrect.
:param Response response: HTTP response
:param codes: List of accepted codes or callable
:raises: ApiError if code invalid
"""
codes = codes or [httplib.OK]
checker = (
codes
if callable(codes)
else lambda resp: resp.status_code in codes
)
if not checker(response):
raise exceptions.ApiError(response, response.json())
def process_result(result, model=None):
"""Cast response JSON to Betfair model(s).
:param result: Betfair response JSON
:param BetfairModel model: Deserialization format; if `None`, return raw
JSON
"""
if model is None:
return result
if isinstance(result, collections.Sequence):
return [model(**item) for item in result]
return model(**result)
class BetfairEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
if isinstance(o, BetfairModel):
o.validate()
return o.serialize()
if isinstance(o, enum.Enum):
return o.name
return super(BetfairEncoder, self).default(o)
def make_payload(base, method, params):
"""Build Betfair JSON-RPC payload.
:param str base: Betfair base ("Sports" or "Account")
:param str method: Betfair endpoint
:param dict params: Request parameters
"""
payload = {
'jsonrpc': '2.0',
'method': '{base}APING/v1.0/{method}'.format(**locals()),
'params': utils.serialize_dict(params),
'id': 1,
}
return payload
@decorator.decorator
def requires_login(func, *args, **kwargs):
"""Decorator to check that the user is logged in. Raises `BetfairError`
if instance variable `session_token` is absent.
"""
self = args[0]
if self.session_token:
return func(*args, **kwargs)
raise exceptions.NotLoggedIn()
|
jmcarp/betfair.py | betfair/utils.py | process_result | python | def process_result(result, model=None):
if model is None:
return result
if isinstance(result, collections.Sequence):
return [model(**item) for item in result]
return model(**result) | Cast response JSON to Betfair model(s).
:param result: Betfair response JSON
:param BetfairModel model: Deserialization format; if `None`, return raw
JSON | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L73-L84 | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import json
import datetime
import collections
import enum
import decorator
from six.moves import http_client as httplib
from betfair import exceptions
from betfair.meta import utils
from betfair.meta.models import BetfairModel
def get_chunks(sequence, chunk_size):
"""Split sequence into chunks.
:param list sequence:
:param int chunk_size:
"""
return [
sequence[idx:idx + chunk_size]
for idx in range(0, len(sequence), chunk_size)
]
def get_kwargs(kwargs):
"""Get all keys and values from dictionary where key is not `self`.
:param dict kwargs: Input parameters
"""
return {
key: value for key, value in six.iteritems(kwargs)
if key != 'self'
}
def check_status_code(response, codes=None):
"""Check HTTP status code and raise exception if incorrect.
:param Response response: HTTP response
:param codes: List of accepted codes or callable
:raises: ApiError if code invalid
"""
codes = codes or [httplib.OK]
checker = (
codes
if callable(codes)
else lambda resp: resp.status_code in codes
)
if not checker(response):
raise exceptions.ApiError(response, response.json())
def result_or_error(response):
"""Get `result` field from Betfair response or raise exception if not
found.
:param Response response:
:raises: ApiError if no results passed
"""
data = response.json()
result = data.get('result')
if result is not None:
return result
raise exceptions.ApiError(response, data)
class BetfairEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
if isinstance(o, BetfairModel):
o.validate()
return o.serialize()
if isinstance(o, enum.Enum):
return o.name
return super(BetfairEncoder, self).default(o)
def make_payload(base, method, params):
"""Build Betfair JSON-RPC payload.
:param str base: Betfair base ("Sports" or "Account")
:param str method: Betfair endpoint
:param dict params: Request parameters
"""
payload = {
'jsonrpc': '2.0',
'method': '{base}APING/v1.0/{method}'.format(**locals()),
'params': utils.serialize_dict(params),
'id': 1,
}
return payload
@decorator.decorator
def requires_login(func, *args, **kwargs):
"""Decorator to check that the user is logged in. Raises `BetfairError`
if instance variable `session_token` is absent.
"""
self = args[0]
if self.session_token:
return func(*args, **kwargs)
raise exceptions.NotLoggedIn()
|
jmcarp/betfair.py | betfair/utils.py | make_payload | python | def make_payload(base, method, params):
payload = {
'jsonrpc': '2.0',
'method': '{base}APING/v1.0/{method}'.format(**locals()),
'params': utils.serialize_dict(params),
'id': 1,
}
return payload | Build Betfair JSON-RPC payload.
:param str base: Betfair base ("Sports" or "Account")
:param str method: Betfair endpoint
:param dict params: Request parameters | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L100-L113 | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import json
import datetime
import collections
import enum
import decorator
from six.moves import http_client as httplib
from betfair import exceptions
from betfair.meta import utils
from betfair.meta.models import BetfairModel
def get_chunks(sequence, chunk_size):
"""Split sequence into chunks.
:param list sequence:
:param int chunk_size:
"""
return [
sequence[idx:idx + chunk_size]
for idx in range(0, len(sequence), chunk_size)
]
def get_kwargs(kwargs):
"""Get all keys and values from dictionary where key is not `self`.
:param dict kwargs: Input parameters
"""
return {
key: value for key, value in six.iteritems(kwargs)
if key != 'self'
}
def check_status_code(response, codes=None):
"""Check HTTP status code and raise exception if incorrect.
:param Response response: HTTP response
:param codes: List of accepted codes or callable
:raises: ApiError if code invalid
"""
codes = codes or [httplib.OK]
checker = (
codes
if callable(codes)
else lambda resp: resp.status_code in codes
)
if not checker(response):
raise exceptions.ApiError(response, response.json())
def result_or_error(response):
"""Get `result` field from Betfair response or raise exception if not
found.
:param Response response:
:raises: ApiError if no results passed
"""
data = response.json()
result = data.get('result')
if result is not None:
return result
raise exceptions.ApiError(response, data)
def process_result(result, model=None):
"""Cast response JSON to Betfair model(s).
:param result: Betfair response JSON
:param BetfairModel model: Deserialization format; if `None`, return raw
JSON
"""
if model is None:
return result
if isinstance(result, collections.Sequence):
return [model(**item) for item in result]
return model(**result)
class BetfairEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
if isinstance(o, BetfairModel):
o.validate()
return o.serialize()
if isinstance(o, enum.Enum):
return o.name
return super(BetfairEncoder, self).default(o)
@decorator.decorator
def requires_login(func, *args, **kwargs):
"""Decorator to check that the user is logged in. Raises `BetfairError`
if instance variable `session_token` is absent.
"""
self = args[0]
if self.session_token:
return func(*args, **kwargs)
raise exceptions.NotLoggedIn()
|
jmcarp/betfair.py | betfair/utils.py | requires_login | python | def requires_login(func, *args, **kwargs):
self = args[0]
if self.session_token:
return func(*args, **kwargs)
raise exceptions.NotLoggedIn() | Decorator to check that the user is logged in. Raises `BetfairError`
if instance variable `session_token` is absent. | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L117-L124 | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import json
import datetime
import collections
import enum
import decorator
from six.moves import http_client as httplib
from betfair import exceptions
from betfair.meta import utils
from betfair.meta.models import BetfairModel
def get_chunks(sequence, chunk_size):
"""Split sequence into chunks.
:param list sequence:
:param int chunk_size:
"""
return [
sequence[idx:idx + chunk_size]
for idx in range(0, len(sequence), chunk_size)
]
def get_kwargs(kwargs):
"""Get all keys and values from dictionary where key is not `self`.
:param dict kwargs: Input parameters
"""
return {
key: value for key, value in six.iteritems(kwargs)
if key != 'self'
}
def check_status_code(response, codes=None):
"""Check HTTP status code and raise exception if incorrect.
:param Response response: HTTP response
:param codes: List of accepted codes or callable
:raises: ApiError if code invalid
"""
codes = codes or [httplib.OK]
checker = (
codes
if callable(codes)
else lambda resp: resp.status_code in codes
)
if not checker(response):
raise exceptions.ApiError(response, response.json())
def result_or_error(response):
"""Get `result` field from Betfair response or raise exception if not
found.
:param Response response:
:raises: ApiError if no results passed
"""
data = response.json()
result = data.get('result')
if result is not None:
return result
raise exceptions.ApiError(response, data)
def process_result(result, model=None):
"""Cast response JSON to Betfair model(s).
:param result: Betfair response JSON
:param BetfairModel model: Deserialization format; if `None`, return raw
JSON
"""
if model is None:
return result
if isinstance(result, collections.Sequence):
return [model(**item) for item in result]
return model(**result)
class BetfairEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
if isinstance(o, BetfairModel):
o.validate()
return o.serialize()
if isinstance(o, enum.Enum):
return o.name
return super(BetfairEncoder, self).default(o)
def make_payload(base, method, params):
"""Build Betfair JSON-RPC payload.
:param str base: Betfair base ("Sports" or "Account")
:param str method: Betfair endpoint
:param dict params: Request parameters
"""
payload = {
'jsonrpc': '2.0',
'method': '{base}APING/v1.0/{method}'.format(**locals()),
'params': utils.serialize_dict(params),
'id': 1,
}
return payload
@decorator.decorator
|
jmcarp/betfair.py | betfair/price.py | nearest_price | python | def nearest_price(price, cutoffs=CUTOFFS):
if price <= MIN_PRICE:
return MIN_PRICE
if price > MAX_PRICE:
return MAX_PRICE
price = as_dec(price)
for cutoff, step in cutoffs:
if price < cutoff:
break
step = as_dec(step)
return float((price * step).quantize(2, ROUND_HALF_UP) / step) | Returns the nearest Betfair odds value to price.
Adapted from Anton Zemlyanov's AlgoTrader project (MIT licensed).
https://github.com/AlgoTrader/betfair-sports-api/blob/master/lib/betfair_price.js
:param float price: Approximate Betfair price (i.e. decimal odds value)
:param tuple cutoffs: Optional tuple of (cutoff, step) pairs
:returns: The nearest Befair price
:rtype: float | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/price.py#L49-L70 | [
"def as_dec(value):\n return Decimal(str(value))\n"
] | # -*- coding: utf-8 -*-
from __future__ import division
from decimal import Decimal, ROUND_HALF_UP
CUTOFFS = (
(2, 100),
(3, 50),
(4, 20),
(6, 10),
(10, 5),
(20, 2),
(30, 1),
(50, 0.5),
(100, 0.2),
(1000, 0.1),
)
MIN_PRICE = 1.01
MAX_PRICE = 1000
def as_dec(value):
return Decimal(str(value))
def arange(start, stop, step):
while start < stop:
yield start
start += step
def make_prices(min_price, cutoffs):
prices = []
cursor = as_dec(min_price)
for cutoff, step in cutoffs:
prices.extend(arange(as_dec(cursor), as_dec(cutoff), as_dec(1 / step)))
cursor = cutoff
prices.append(as_dec(MAX_PRICE))
return prices
PRICES = make_prices(MIN_PRICE, CUTOFFS)
def ticks_difference(price_1, price_2):
"""Returns the absolute difference in terms of "ticks" (i.e. individual
price increments) between two Betfair prices.
:param float price_1: An exact, valid Betfair price
:param float price_2: An exact, valid Betfair price
:returns: The absolute value of the difference between the prices in "ticks"
:rtype: int
"""
price_1_index = PRICES.index(as_dec(price_1))
price_2_index = PRICES.index(as_dec(price_2))
return abs(price_1_index - price_2_index)
def price_ticks_away(price, n_ticks):
"""Returns an exact, valid Betfair price that is n_ticks "ticks" away from
the given price. n_ticks may positive, negative or zero (in which case the
same price is returned) but if there is no price n_ticks away from the
given price then an exception will be thrown.
:param float price: An exact, valid Betfair price
:param float n_ticks: The number of ticks away from price the new price is
:returns: An exact, valid Betfair price
:rtype: float
"""
price_index = PRICES.index(as_dec(price))
return float(PRICES[price_index + n_ticks])
|
jmcarp/betfair.py | betfair/price.py | ticks_difference | python | def ticks_difference(price_1, price_2):
price_1_index = PRICES.index(as_dec(price_1))
price_2_index = PRICES.index(as_dec(price_2))
return abs(price_1_index - price_2_index) | Returns the absolute difference in terms of "ticks" (i.e. individual
price increments) between two Betfair prices.
:param float price_1: An exact, valid Betfair price
:param float price_2: An exact, valid Betfair price
:returns: The absolute value of the difference between the prices in "ticks"
:rtype: int | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/price.py#L73-L84 | [
"def as_dec(value):\n return Decimal(str(value))\n"
] | # -*- coding: utf-8 -*-
from __future__ import division
from decimal import Decimal, ROUND_HALF_UP
CUTOFFS = (
(2, 100),
(3, 50),
(4, 20),
(6, 10),
(10, 5),
(20, 2),
(30, 1),
(50, 0.5),
(100, 0.2),
(1000, 0.1),
)
MIN_PRICE = 1.01
MAX_PRICE = 1000
def as_dec(value):
return Decimal(str(value))
def arange(start, stop, step):
while start < stop:
yield start
start += step
def make_prices(min_price, cutoffs):
prices = []
cursor = as_dec(min_price)
for cutoff, step in cutoffs:
prices.extend(arange(as_dec(cursor), as_dec(cutoff), as_dec(1 / step)))
cursor = cutoff
prices.append(as_dec(MAX_PRICE))
return prices
PRICES = make_prices(MIN_PRICE, CUTOFFS)
def nearest_price(price, cutoffs=CUTOFFS):
"""Returns the nearest Betfair odds value to price.
Adapted from Anton Zemlyanov's AlgoTrader project (MIT licensed).
https://github.com/AlgoTrader/betfair-sports-api/blob/master/lib/betfair_price.js
:param float price: Approximate Betfair price (i.e. decimal odds value)
:param tuple cutoffs: Optional tuple of (cutoff, step) pairs
:returns: The nearest Befair price
:rtype: float
"""
if price <= MIN_PRICE:
return MIN_PRICE
if price > MAX_PRICE:
return MAX_PRICE
price = as_dec(price)
for cutoff, step in cutoffs:
if price < cutoff:
break
step = as_dec(step)
return float((price * step).quantize(2, ROUND_HALF_UP) / step)
def price_ticks_away(price, n_ticks):
"""Returns an exact, valid Betfair price that is n_ticks "ticks" away from
the given price. n_ticks may positive, negative or zero (in which case the
same price is returned) but if there is no price n_ticks away from the
given price then an exception will be thrown.
:param float price: An exact, valid Betfair price
:param float n_ticks: The number of ticks away from price the new price is
:returns: An exact, valid Betfair price
:rtype: float
"""
price_index = PRICES.index(as_dec(price))
return float(PRICES[price_index + n_ticks])
|
jmcarp/betfair.py | betfair/price.py | price_ticks_away | python | def price_ticks_away(price, n_ticks):
price_index = PRICES.index(as_dec(price))
return float(PRICES[price_index + n_ticks]) | Returns an exact, valid Betfair price that is n_ticks "ticks" away from
the given price. n_ticks may positive, negative or zero (in which case the
same price is returned) but if there is no price n_ticks away from the
given price then an exception will be thrown.
:param float price: An exact, valid Betfair price
:param float n_ticks: The number of ticks away from price the new price is
:returns: An exact, valid Betfair price
:rtype: float | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/price.py#L87-L99 | [
"def as_dec(value):\n return Decimal(str(value))\n"
] | # -*- coding: utf-8 -*-
from __future__ import division
from decimal import Decimal, ROUND_HALF_UP
CUTOFFS = (
(2, 100),
(3, 50),
(4, 20),
(6, 10),
(10, 5),
(20, 2),
(30, 1),
(50, 0.5),
(100, 0.2),
(1000, 0.1),
)
MIN_PRICE = 1.01
MAX_PRICE = 1000
def as_dec(value):
return Decimal(str(value))
def arange(start, stop, step):
while start < stop:
yield start
start += step
def make_prices(min_price, cutoffs):
prices = []
cursor = as_dec(min_price)
for cutoff, step in cutoffs:
prices.extend(arange(as_dec(cursor), as_dec(cutoff), as_dec(1 / step)))
cursor = cutoff
prices.append(as_dec(MAX_PRICE))
return prices
PRICES = make_prices(MIN_PRICE, CUTOFFS)
def nearest_price(price, cutoffs=CUTOFFS):
"""Returns the nearest Betfair odds value to price.
Adapted from Anton Zemlyanov's AlgoTrader project (MIT licensed).
https://github.com/AlgoTrader/betfair-sports-api/blob/master/lib/betfair_price.js
:param float price: Approximate Betfair price (i.e. decimal odds value)
:param tuple cutoffs: Optional tuple of (cutoff, step) pairs
:returns: The nearest Befair price
:rtype: float
"""
if price <= MIN_PRICE:
return MIN_PRICE
if price > MAX_PRICE:
return MAX_PRICE
price = as_dec(price)
for cutoff, step in cutoffs:
if price < cutoff:
break
step = as_dec(step)
return float((price * step).quantize(2, ROUND_HALF_UP) / step)
def ticks_difference(price_1, price_2):
"""Returns the absolute difference in terms of "ticks" (i.e. individual
price increments) between two Betfair prices.
:param float price_1: An exact, valid Betfair price
:param float price_2: An exact, valid Betfair price
:returns: The absolute value of the difference between the prices in "ticks"
:rtype: int
"""
price_1_index = PRICES.index(as_dec(price_1))
price_2_index = PRICES.index(as_dec(price_2))
return abs(price_1_index - price_2_index)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.login | python | def login(self, username, password):
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken'] | Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L93-L117 | [
"def check_status_code(response, codes=None):\n \"\"\"Check HTTP status code and raise exception if incorrect.\n\n :param Response response: HTTP response\n :param codes: List of accepted codes or callable\n :raises: ApiError if code invalid\n \"\"\"\n codes = codes or [httplib.OK]\n checker = ... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.list_market_profit_and_loss | python | def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
) | Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L284-L301 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.iter_list_market_book | python | def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
)) | Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book` | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L305-L315 | [
"def get_chunks(sequence, chunk_size):\n \"\"\"Split sequence into chunks.\n\n :param list sequence:\n :param int chunk_size:\n \"\"\"\n return [\n sequence[idx:idx + chunk_size]\n for idx in range(0, len(sequence), chunk_size)\n ]\n"
] | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.iter_list_market_profit_and_loss | python | def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
)) | Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss` | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L317-L328 | [
"def get_chunks(sequence, chunk_size):\n \"\"\"Split sequence into chunks.\n\n :param list sequence:\n :param int chunk_size:\n \"\"\"\n return [\n sequence[idx:idx + chunk_size]\n for idx in range(0, len(sequence), chunk_size)\n ]\n"
] | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.place_orders | python | def place_orders(self, market_id, instructions, customer_ref=None):
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
) | Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L384-L397 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.cancel_orders | python | def cancel_orders(self, market_id, instructions, customer_ref=None):
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
) | Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L400-L413 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.replace_orders | python | def replace_orders(self, market_id, instructions, customer_ref=None):
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
) | This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L416-L429 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.update_orders | python | def update_orders(self, market_id, instructions, customer_ref=None):
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
) | Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L432-L444 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.get_account_funds | python | def get_account_funds(self, wallet=None):
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
) | Get available to bet amount.
:param Wallet wallet: Name of the wallet in question | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L447-L457 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
    """Split call to `list_market_book` into separate requests.

    :param list market_ids: List of market IDs
    :param int chunk_size: Number of records per chunk
    :param dict kwargs: Arguments passed to `list_market_book`
    :returns: Iterator over the combined results of all chunked requests
    """
    # chain(*generator) would unpack the generator eagerly, firing every
    # chunked API request up front; from_iterable keeps the requests lazy,
    # so each chunk is fetched only as the caller consumes the iterator.
    return itertools.chain.from_iterable(
        self.list_market_book(market_chunk, **kwargs)
        for market_chunk in utils.get_chunks(market_ids, chunk_size)
    )
def iter_list_market_profit_and_loss(
        self, market_ids, chunk_size, **kwargs):
    """Split call to `list_market_profit_and_loss` into separate requests.

    :param list market_ids: List of market IDs
    :param int chunk_size: Number of records per chunk
    :param dict kwargs: Arguments passed to `list_market_profit_and_loss`
    :returns: Iterator over the combined results of all chunked requests
    """
    # chain(*generator) would unpack the generator eagerly, firing every
    # chunked API request up front; from_iterable keeps the requests lazy,
    # so each chunk is fetched only as the caller consumes the iterator.
    return itertools.chain.from_iterable(
        self.list_market_profit_and_loss(market_chunk, **kwargs)
        for market_chunk in utils.get_chunks(market_ids, chunk_size)
    )
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_statement(
        self, locale=None, from_record=None, record_count=None,
        item_date_range=None, include_item=None, wallet=None):
    """Get account statement.

    :param str locale: The language to be used where applicable
    :param int from_record: Specifies the first record that will be returned
    :param int record_count: Specifies the maximum number of records to be returned
    :param TimeRange item_date_range: Return items with an itemDate within this date range
    :param IncludeItem include_item: Which items to include
    :param Wallet wallet: Which wallet to return statementItems for
    """
    # Fixed: decorator was applied twice; `requires_login` is needed once.
    return self.make_api_request(
        'Account',
        'getAccountStatement',
        utils.get_kwargs(locals()),
        model=models.AccountStatementReport,
    )
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
    """Transfer funds between the UK Exchange and Australian Exchange wallets.

    :param Wallet from_: Source wallet
    :param Wallet to: Destination wallet
    :param float amount: Amount to transfer
    """
    # NOTE: get_kwargs(locals()) builds the request payload from the local
    # namespace, so no extra locals may be introduced before this point.
    params = utils.get_kwargs(locals())
    return self.make_api_request(
        'Account', 'transferFunds', params,
        model=models.TransferResponse,
    )
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.get_account_statement | python | def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
) | Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L460-L477 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_details(self):
    """Returns the details relating your account, including your discount
    rate and Betfair point balance.
    """
    # Fixed: decorator was applied twice; `requires_login` is needed once.
    return self.make_api_request(
        'Account',
        'getAccountDetails',
        utils.get_kwargs(locals()),
        model=models.AccountDetailsResponse,
    )
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.get_account_details | python | def get_account_details(self):
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
) | Returns the details relating your account, including your discount
rate and Betfair point balance. | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L480-L489 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.list_currency_rates | python | def list_currency_rates(self, from_currency=None):
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
) | Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L492-L502 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
@utils.requires_login
def transfer_funds(self, from_, to, amount):
"""Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer
"""
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
)
|
jmcarp/betfair.py | betfair/betfair.py | Betfair.transfer_funds | python | def transfer_funds(self, from_, to, amount):
return self.make_api_request(
'Account',
'transferFunds',
utils.get_kwargs(locals()),
model=models.TransferResponse,
) | Transfer funds between the UK Exchange and Australian Exchange wallets.
:param Wallet from_: Source wallet
:param Wallet to: Destination wallet
:param float amount: Amount to transfer | train | https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L505-L517 | [
"def get_kwargs(kwargs):\n \"\"\"Get all keys and values from dictionary where key is not `self`.\n\n :param dict kwargs: Input parameters\n \"\"\"\n return {\n key: value for key, value in six.iteritems(kwargs)\n if key != 'self'\n }\n",
"def make_api_request(self, base, method, para... | class Betfair(object):
"""Betfair API client.
:param str app_key: Optional application identifier
:param str cert_file: Path to self-signed SSL certificate file(s); may be
a *.pem file or a tuple of (*.crt, *.key) files
:param str content_type: Optional content type
:param str locale: Optional location ("australia", "italy", etc.)
:param Session session: Optional Requests session
:param int timeout: Optional timeout duration (seconds)
"""
def __init__(self, app_key, cert_file, content_type='application/json', locale=None,
session=None, timeout=None):
self.app_key = app_key
self.cert_file = cert_file
self.content_type = content_type
self.locale = locale
self.session = session or requests.Session()
self.session_token = None
self.timeout = timeout
@property
def identity_url(self):
return IDENTITY_URLS[self.locale]
@property
def api_url(self):
return API_URLS[self.locale]
@property
def headers(self):
return {
'X-Application': self.app_key,
'X-Authentication': self.session_token,
'Content-Type': self.content_type,
'Accept': 'application/json',
}
def make_auth_request(self, method):
response = self.session.post(
os.path.join(self.identity_url, method),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response)
data = response.json()
if data.get('status') != 'SUCCESS':
raise exceptions.AuthError(response, data)
def make_api_request(self, base, method, params, codes=None, model=None):
payload = utils.make_payload(base, method, params)
response = self.session.post(
self.api_url,
data=json.dumps(payload, cls=utils.BetfairEncoder),
headers=self.headers,
timeout=self.timeout,
)
utils.check_status_code(response, codes=codes)
result = utils.result_or_error(response)
return utils.process_result(result, model)
# Authentication methods
def login(self, username, password):
"""Log in to Betfair. Sets `session_token` if successful.
:param str username: Username
:param str password: Password
:raises: BetfairLoginError
"""
response = self.session.post(
os.path.join(self.identity_url, 'certlogin'),
cert=self.cert_file,
data=urllib.urlencode({
'username': username,
'password': password,
}),
headers={
'X-Application': self.app_key,
'Content-Type': 'application/x-www-form-urlencoded',
},
timeout=self.timeout,
)
utils.check_status_code(response, [httplib.OK])
data = response.json()
if data.get('loginStatus') != 'SUCCESS':
raise exceptions.LoginError(response, data)
self.session_token = data['sessionToken']
@utils.requires_login
def keep_alive(self):
"""Reset session timeout.
:raises: AuthError
"""
self.make_auth_request('keepAlive')
@utils.requires_login
def logout(self):
"""Log out and clear `session_token`.
:raises: AuthError
"""
self.make_auth_request('logout')
self.session_token = None
# Bet query methods
@utils.requires_login
def list_event_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEventTypes',
utils.get_kwargs(locals()),
model=models.EventTypeResult,
)
@utils.requires_login
def list_competitions(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCompetitions',
utils.get_kwargs(locals()),
model=models.CompetitionResult,
)
@utils.requires_login
def list_time_ranges(self, granularity, filter=None):
"""
:param TimeGranularity granularity:
:param MarketFilter filter:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listTimeRanges',
utils.get_kwargs(locals()),
model=models.TimeRangeResult,
)
@utils.requires_login
def list_events(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listEvents',
utils.get_kwargs(locals()),
model=models.EventResult,
)
@utils.requires_login
def list_market_types(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketTypes',
utils.get_kwargs(locals()),
model=models.MarketTypeResult,
)
@utils.requires_login
def list_countries(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listCountries',
utils.get_kwargs(locals()),
model=models.CountryCodeResult,
)
@utils.requires_login
def list_venues(self, filter=None, locale=None):
"""
:param MarketFilter filter:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listVenues',
utils.get_kwargs(locals()),
model=models.VenueResult,
)
@utils.requires_login
def list_market_catalogue(
self, filter=None, max_results=100, market_projection=None, locale=None,
sort=None):
"""
:param MarketFilter filter:
:param int max_results:
:param list market_projection:
:param MarketSort sort:
:param str locale:
"""
filter = filter or models.MarketFilter()
return self.make_api_request(
'Sports',
'listMarketCatalogue',
utils.get_kwargs(locals()),
model=models.MarketCatalogue,
)
@utils.requires_login
def list_market_book(
self, market_ids, price_projection=None, order_projection=None,
match_projection=None, currency_code=None, locale=None):
"""
:param list market_ids: List of market IDs
:param PriceProjection price_projection:
:param OrderProjection order_projection:
:param MatchProjection match_projection:
:param str currency_code:
:param str locale:
"""
return self.make_api_request(
'Sports',
'listMarketBook',
utils.get_kwargs(locals()),
model=models.MarketBook,
)
@utils.requires_login
def list_market_profit_and_loss(
self, market_ids, include_settled_bets=False,
include_bsp_bets=None, net_of_commission=None):
"""Retrieve profit and loss for a given list of markets.
:param list market_ids: List of markets to calculate profit and loss
:param bool include_settled_bets: Option to include settled bets
:param bool include_bsp_bets: Option to include BSP bets
:param bool net_of_commission: Option to return profit and loss net of
users current commission rate for this market including any special
tariffs
"""
return self.make_api_request(
'Sports',
'listMarketProfitAndLoss',
utils.get_kwargs(locals()),
model=models.MarketProfitAndLoss,
)
# Chunked iterators for list methods
def iter_list_market_book(self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_book` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_book`
"""
return itertools.chain(*(
self.list_market_book(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
def iter_list_market_profit_and_loss(
self, market_ids, chunk_size, **kwargs):
"""Split call to `list_market_profit_and_loss` into separate requests.
:param list market_ids: List of market IDs
:param int chunk_size: Number of records per chunk
:param dict kwargs: Arguments passed to `list_market_profit_and_loss`
"""
return itertools.chain(*(
self.list_market_profit_and_loss(market_chunk, **kwargs)
for market_chunk in utils.get_chunks(market_ids, chunk_size)
))
# Betting methods
@utils.requires_login
def list_current_orders(
self, bet_ids=None, market_ids=None, order_projection=None,
date_range=None, order_by=None, sort_dir=None, from_record=None,
record_count=None):
"""
:param bet_ids:
:param market_ids:
:param order_projection:
:param date_range:
:param order_by:
:param sort_dir:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listCurrentOrders',
utils.get_kwargs(locals()),
model=models.CurrentOrderSummaryReport,
)
@utils.requires_login
def list_cleared_orders(
self, bet_status, event_type_ids, event_ids, market_ids,
runner_ids, bet_ids, side, settled_date_range, group_by,
include_item_description, locale, from_record, record_count):
"""
:param bet_status:
:param event_type_ids:
:param event_ids:
:param market_ids:
:param runner_ids:
:param bet_ids:
:param side:
:param settled_date_range:
:param group_by:
:param include_item_description:
:param locale:
:param from_record:
:param record_count:
"""
return self.make_api_request(
'Sports',
'listClearedOrders',
utils.get_kwargs(locals()),
model=models.ClearedOrderSummaryReport,
)
@utils.requires_login
def place_orders(self, market_id, instructions, customer_ref=None):
"""Place new orders into market. This operation is atomic in that all
orders will be placed or none will be placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `PlaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'placeOrders',
utils.get_kwargs(locals()),
model=models.PlaceExecutionReport,
)
@utils.requires_login
def cancel_orders(self, market_id, instructions, customer_ref=None):
"""Cancel all bets OR cancel all bets on a market OR fully or
partially cancel particular orders on a market.
:param str market_id: If not supplied all bets are cancelled
:param list instructions: List of `CancelInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'cancelOrders',
utils.get_kwargs(locals()),
model=models.CancelExecutionReport,
)
@utils.requires_login
def replace_orders(self, market_id, instructions, customer_ref=None):
"""This operation is logically a bulk cancel followed by a bulk place.
The cancel is completed first then the new orders are placed.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `ReplaceInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'replaceOrders',
utils.get_kwargs(locals()),
model=models.ReplaceExecutionReport,
)
@utils.requires_login
def update_orders(self, market_id, instructions, customer_ref=None):
"""Update non-exposure changing fields.
:param str market_id: The market id these orders are to be placed on
:param list instructions: List of `UpdateInstruction` objects
:param str customer_ref: Optional order identifier string
"""
return self.make_api_request(
'Sports',
'updateOrders',
utils.get_kwargs(locals()),
model=models.UpdateExecutionReport,
)
@utils.requires_login
def get_account_funds(self, wallet=None):
"""Get available to bet amount.
:param Wallet wallet: Name of the wallet in question
"""
return self.make_api_request(
'Account',
'getAccountFunds',
utils.get_kwargs(locals()),
model=models.AccountFundsResponse,
)
@utils.requires_login
def get_account_statement(
self, locale=None, from_record=None, record_count=None,
item_date_range=None, include_item=None, wallet=None):
"""Get account statement.
:param str locale: The language to be used where applicable
:param int from_record: Specifies the first record that will be returned
:param int record_count: Specifies the maximum number of records to be returned
:param TimeRange item_date_range: Return items with an itemDate within this date range
:param IncludeItem include_item: Which items to include
:param Wallet wallte: Which wallet to return statementItems for
"""
return self.make_api_request(
'Account',
'getAccountStatement',
utils.get_kwargs(locals()),
model=models.AccountStatementReport,
)
@utils.requires_login
def get_account_details(self):
"""Returns the details relating your account, including your discount
rate and Betfair point balance.
"""
return self.make_api_request(
'Account',
'getAccountDetails',
utils.get_kwargs(locals()),
model=models.AccountDetailsResponse,
)
@utils.requires_login
def list_currency_rates(self, from_currency=None):
"""Returns a list of currency rates based on given currency
:param str from_currency: The currency from which the rates are computed
"""
return self.make_api_request(
'Account',
'listCurrencyRates',
utils.get_kwargs(locals()),
model=models.CurrencyRate,
)
@utils.requires_login
|
xflr6/concepts | concepts/visualize.py | lattice | python | def lattice(lattice, filename, directory, render, view, **kwargs):
dot = graphviz.Digraph(
name=lattice.__class__.__name__,
comment=repr(lattice),
filename=filename,
directory=directory,
node_attr=dict(shape='circle', width='.25', style='filled', label=''),
edge_attr=dict(dir='none', labeldistance='1.5', minlen='2'),
**kwargs
)
sortkey = SORTKEYS[0]
node_name = NAME_GETTERS[0]
for concept in lattice._concepts:
name = node_name(concept)
dot.node(name)
if concept.objects:
dot.edge(name, name,
headlabel=' '.join(concept.objects),
labelangle='270', color='transparent')
if concept.properties:
dot.edge(name, name,
taillabel=' '.join(concept.properties),
labelangle='90', color='transparent')
dot.edges((name, node_name(c))
for c in sorted(concept.lower_neighbors, key=sortkey))
if render or view:
dot.render(view=view) # pragma: no cover
return dot | Return graphviz source for visualizing the lattice graph. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/visualize.py#L15-L50 | null | # visualize.py - convert lattice to graphviz dot
import os
import glob
import graphviz
__all__ = ['lattice', 'render_all']
SORTKEYS = [lambda c: c.index]
NAME_GETTERS = [lambda c: 'c%d' % c.index]
def render_all(filepattern='*.cxt', frmat=None, encoding=None,
directory=None, out_format=None): # pragma: no cover
from concepts import Context
if directory is not None:
get_name = lambda filename: os.path.basename(filename)
else:
get_name = lambda filename: filename
if frmat is None:
from concepts.formats import Format
get_frmat = Format.by_extension.get
else:
get_frmat = lambda filename: frmat
for cxtfile in glob.glob(filepattern):
name, ext = os.path.splitext(cxtfile)
filename = '%s.gv' % get_name(name)
c = Context.fromfile(cxtfile, get_frmat(ext), encoding=encoding)
l = c.lattice
dot = l.graphviz(filename, directory)
if out_format is not None:
dot.format = out_format
dot.render()
|
xflr6/concepts | concepts/formats.py | Format.load | python | def load(cls, filename, encoding):
if encoding is None:
encoding = cls.encoding
with io.open(filename, 'r', encoding=encoding) as fd:
source = fd.read()
if cls.normalize_newlines:
source = source.replace('\r\n', '\n').replace('\r', '\n')
return cls.loads(source) | Load and parse serialized objects, properties, bools from file. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/formats.py#L67-L77 | null | class Format(with_metaclass(FormatMeta, object)):
"""Parse and serialize formal contexts in a specific string format."""
__abstract__ = True
encoding = None
normalize_newlines = True
@staticmethod
def loads(source, **kwargs):
"""Parse source string and return ``(objects, properties, bools)``."""
raise NotImplementedError # pragma: no cover
@staticmethod
def dumps(objects, properties, bools, **kwargs):
"""Serialize ``(objects, properties, bools)`` and return string."""
raise NotImplementedError # pragma: no cover
@classmethod
@classmethod
def dump(cls, filename, objects, properties, bools, encoding):
    """Write serialized objects, properties, bools to file."""
    if encoding is None:
        encoding = cls.encoding  # fall back to the format's default encoding
    serialized = cls.dumps(objects, properties, bools)
    if PY2:
        serialized = unicode(serialized)  # io.open requires unicode on Python 2
    with io.open(filename, 'w', encoding=encoding) as fd:
        fd.write(serialized)
|
xflr6/concepts | concepts/formats.py | Format.dump | python | def dump(cls, filename, objects, properties, bools, encoding):
if encoding is None:
encoding = cls.encoding
source = cls.dumps(objects, properties, bools)
if PY2:
source = unicode(source)
with io.open(filename, 'w', encoding=encoding) as fd:
fd.write(source) | Write serialized objects, properties, bools to file. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/formats.py#L80-L90 | null | class Format(with_metaclass(FormatMeta, object)):
"""Parse and serialize formal contexts in a specific string format."""
__abstract__ = True
encoding = None
normalize_newlines = True
@staticmethod
def loads(source, **kwargs):
"""Parse source string and return ``(objects, properties, bools)``."""
raise NotImplementedError # pragma: no cover
@staticmethod
def dumps(objects, properties, bools, **kwargs):
"""Serialize ``(objects, properties, bools)`` and return string."""
raise NotImplementedError # pragma: no cover
@classmethod
def load(cls, filename, encoding):
    """Load and parse serialized objects, properties, bools from file."""
    if encoding is None:
        encoding = cls.encoding  # fall back to the format's default encoding
    with io.open(filename, 'r', encoding=encoding) as fd:
        raw = fd.read()
    if cls.normalize_newlines:
        # Unify Windows and old-Mac line endings before handing off to the parser.
        raw = raw.replace('\r\n', '\n').replace('\r', '\n')
    return cls.loads(raw)
@classmethod
|
xflr6/concepts | concepts/__init__.py | load_csv | python | def load_csv(filename, dialect='excel', encoding='utf-8'):
return Context.fromfile(filename, 'csv', encoding, dialect=dialect) | Load and return formal context from CSV file.
Args:
filename: Path to the CSV file to load the context from.
dialect: Syntax variant of the CSV file (``'excel'``, ``'excel-tab'``).
encoding (str): Encoding of the file (``'utf-8'``, ``'latin1'``, ``'ascii'``, ...).
Example:
>>> load_csv('examples/vowels.csv') # doctest: +ELLIPSIS
<Context object mapping 12 objects to 8 properties [a717eee4] at 0x...> | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/__init__.py#L61-L73 | null | # concepts - implement basic formal concept analysis
"""Formal Concept Analysis (FCA) with Python."""
from .contexts import Context
from .definitions import Definition
__all__ = [
'Context', 'Definition',
'load', 'load_cxt', 'load_csv',
'make_context',
]
__title__ = 'concepts'
__version__ = '0.8.1.dev0'
__author__ = 'Sebastian Bank <sebastian.bank@uni-leipzig.de>'
__license__ = 'MIT, see LICENSE.txt'
__copyright__ = 'Copyright (c) 2013-2019 Sebastian Bank'
EXAMPLE = '''
|+1|-1|+2|-2|+3|-3|+sg|+pl|-sg|-pl|
1sg| X| | | X| | X| X| | | X|
1pl| X| | | X| | X| | X| X| |
2sg| | X| X| | | X| X| | | X|
2pl| | X| X| | | X| | X| X| |
3sg| | X| | X| X| | X| | | X|
3pl| | X| | X| X| | | X| X| |
'''
def load(filename, encoding='utf-8', frmat=None):
    """Load and return formal context from file.

    Args:
        filename: Path to the file to load the context from.
        encoding (str): Encoding of the file (``'utf-8'``, ``'latin1'``, ``'ascii'``, ...).
        frmat (str): Format of the file (``'table'``, ``'cxt'``, ``'csv'``).
            If None, inferred from filename suffix (default).

    Example:
        >>> load('examples/liveinwater.txt') # doctest: +ELLIPSIS
        <Context object mapping 8 objects to 9 properties [b1e86589] at 0x...>
    """
    context = Context.fromfile(filename, frmat, encoding)
    return context
def load_cxt(filename, encoding=None):
    """Load and return formal context from CXT file.

    Args:
        filename: Path to the CXT file to load the context from.
        encoding (str): Encoding of the file (``'utf-8'``, ``'latin1'``, ``'ascii'``, ...).

    Example:
        >>> load_cxt('examples/digits.cxt') # doctest: +ELLIPSIS
        <Context object mapping 10 objects to 7 properties [51e571e6] at 0x...>
    """
    context = Context.fromfile(filename, 'cxt', encoding)
    return context
def make_context(source, frmat='table'):
    """Return a new context from source string in the given format.

    Args:
        source (str): Formal context table as plain-text string.
        frmat (str): Format of the context string (``'table'``, ``'cxt'``, ``'csv'``).

    Example:
        >>> make_context('''
        ... |male|female|adult|child|
        ... man | X | | X | |
        ... woman| | X | X | |
        ... boy | X | | | X |
        ... girl | | X | | X |
        ... ''') # doctest: +ELLIPSIS
        <Context object mapping 4 objects to 4 properties [65aa9782] at 0x...>
    """
    context = Context.fromstring(source, frmat=frmat)
    return context
|
xflr6/concepts | concepts/definitions.py | conflicting_pairs | python | def conflicting_pairs(left, right):
objects = left._objects & right._objects
properties = left._properties & right._properties
difference = left._pairs ^ right._pairs
for o in objects:
for p in properties:
if (o, p) in difference:
yield (o, p) | Yield all ``(object, property)`` pairs where the two definitions disagree. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L200-L208 | null | # definitions.py - mutable triples of object, properties, bools
"""Mutable formal context creation arguments with set-like operations."""
from ._compat import zip, py3_unicode_to_str
from . import formats, tools
__all__ = ['Definition']
@py3_unicode_to_str
class Triple(object):
"""Triple of ``(objects, properties, bools)`` for creating a context.
>>> t = Triple(['Mr. Praline', 'parrot'], ['alive', 'dead'],
... [(True, False), (False, True)])
>>> t # doctest: +NORMALIZE_WHITESPACE
<Triple(['Mr. Praline', 'parrot'], ['alive', 'dead'],
[(True, False), (False, True)])>
>>> print(t)
|alive|dead|
Mr. Praline|X | |
parrot | |X |
>>> tuple(t)
(('Mr. Praline', 'parrot'), ('alive', 'dead'), [(True, False), (False, True)])
>>> (t[0], t[1], t[2]) == (t.objects, t.properties, t.bools)
True
>>> t == (t.objects, t.properties, t.bools)
True
>>> t['Mr. Praline', 'alive']
True
>>> t['parrot', 'alive']
False
>>> t.take(['parrot'])
<Triple(['parrot'], ['alive', 'dead'], [(False, True)])>
>>> t.take(properties=['dead'])
<Triple(['Mr. Praline', 'parrot'], ['dead'], [(False,), (True,)])>
>>> t.take(['Brian'], ['alive', 'holy'])
Traceback (most recent call last):
...
KeyError: ['Brian', 'holy']
>>> t.take(['parrot', 'Mr. Praline'], ['alive'], reorder=True)
<Triple(['parrot', 'Mr. Praline'], ['alive'], [(False,), (True,)])>
>>> print(t.transposed())
|Mr. Praline|parrot|
alive|X | |
dead | |X |
>>> print(t.inverted())
|alive|dead|
Mr. Praline| |X |
parrot |X | |
"""
@classmethod
def fromfile(cls, filename, frmat='cxt', encoding=None, **kwargs):
"""Return a new definiton from file source in given format."""
frmat = formats.Format[frmat]
objects, properties, bools = frmat.load(filename, encoding, **kwargs)
return cls(objects, properties, bools)
@classmethod
def _fromargs(cls, _objects, _properties, _pairs):
inst = super(Triple, cls).__new__(cls)
inst._objects = _objects
inst._properties = _properties
inst._pairs = _pairs
return inst
def __init__(self, objects=(), properties=(), bools=()):
self._objects = tools.Unique(objects)
if len(self._objects) != len(objects):
raise ValueError('duplicate objects: %r' % (objects,))
self._properties = tools.Unique(properties)
if len(self._properties) != len(properties):
raise ValueError('duplicate properties: %r' % (properties,))
self._pairs = {(o, p) for o, boo in zip(objects, bools)
for p, b in zip(properties, boo) if b}
def copy(self):
"""Return an independent copy of the instance."""
return self._fromargs(self._objects.copy(), self._properties.copy(),
self._pairs.copy())
def __iter__(self):
"""Yield ``objects``, ``properties``, and ``bools`` (e.g. for triple unpacking)."""
yield self.objects
yield self.properties
yield self.bools
def __getitem__(self, pair):
if isinstance(pair, int):
return list(self)[pair]
o, p = pair
if o not in self._objects or p not in self._properties:
raise KeyError(pair)
return pair in self._pairs
def __eq__(self, other):
if isinstance(other, Triple): # order insensitive
return (self._objects == other._objects and
self._properties == other._properties and
self._pairs == other._pairs)
return (self.objects, self.properties, self.bools) == other
def __ne__(self, other):
return not self == other
@property
def objects(self):
"""(Names of the) objects described by the definition."""
return tuple(self._objects)
@property
def properties(self):
"""(Names of the) properties that describe the objects."""
return tuple(self._properties)
@property
def bools(self):
"""Row-major list of boolean tuples."""
prop = self._properties
pairs = self._pairs
return [tuple((o, p) in pairs for p in prop)
for o in self._objects]
def __str__(self):
return self.tostring(escape=True)
def __unicode__(self):
return self.tostring()
def __repr__(self):
return '<%s(%r, %r, %r)>' % (self.__class__.__name__,
self._objects._items, self._properties._items, self.bools)
def tostring(self, frmat='table', **kwargs):
"""Return the definition serialized in the given string-based format."""
return formats.Format[frmat].dumps(*self, **kwargs)
def crc32(self, encoding='utf-8'):
"""Return hex-encoded unsigned CRC32 over encoded definition table string."""
return tools.crc32_hex(self.tostring().encode(encoding))
def take(self, objects=None, properties=None, reorder=False):
"""Return a subset with given objects/properties as new definition."""
if (objects and not self._objects.issuperset(objects) or
properties and not self._properties.issuperset(properties)):
notfound = (self._objects.rsub(objects or ()) |
self._properties.rsub(properties or ()))
raise KeyError(list(notfound))
if reorder:
obj = tools.Unique(objects) if objects is not None else self._objects.copy()
prop = tools.Unique(properties) if properties is not None else self._properties.copy()
else:
obj = self._objects.copy()
prop = self._properties.copy()
if objects is not None:
obj &= objects
if properties is not None:
prop &= properties
pairs = self._pairs
_pairs = {(o, p) for o in obj for p in prop if (o, p) in pairs}
return self._fromargs(obj, prop, _pairs)
def transposed(self):
"""Return a new definition swapping ``objects`` and ``properties``."""
_pairs = {(p, o) for (o, p) in self._pairs}
return self._fromargs(self._properties.copy(), self._objects.copy(), _pairs)
def inverted(self):
"""Return a new definition flipping all booleans."""
pairs = self._pairs
_pairs = {(o, p) for o in self._objects for p in self._properties
if (o, p) not in pairs}
return self._fromargs(self._objects.copy(), self._properties.copy(), _pairs)
__neg__ = transposed
__invert__ = inverted
def ensure_compatible(left, right):
    """Raise an informative ``ValueError`` if the two definitions disagree."""
    conflicts = list(conflicting_pairs(left, right))
    if not conflicts:
        return
    raise ValueError('conflicting values for object/property pairs: %r' % conflicts)
class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | ensure_compatible | python | def ensure_compatible(left, right):
conflicts = list(conflicting_pairs(left, right))
if conflicts:
raise ValueError('conflicting values for object/property pairs: %r' % conflicts) | Raise an informative ``ValueError`` if the two definitions disagree. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L211-L215 | [
"def conflicting_pairs(left, right):\n \"\"\"Yield all ``(object, property)`` pairs where the two definitions disagree.\"\"\"\n objects = left._objects & right._objects\n properties = left._properties & right._properties\n difference = left._pairs ^ right._pairs\n for o in objects:\n for p in ... | # definitions.py - mutable triples of object, properties, bools
"""Mutable formal context creation arguments with set-like operations."""
from ._compat import zip, py3_unicode_to_str
from . import formats, tools
__all__ = ['Definition']
@py3_unicode_to_str
class Triple(object):
"""Triple of ``(objects, properties, bools)`` for creating a context.
>>> t = Triple(['Mr. Praline', 'parrot'], ['alive', 'dead'],
... [(True, False), (False, True)])
>>> t # doctest: +NORMALIZE_WHITESPACE
<Triple(['Mr. Praline', 'parrot'], ['alive', 'dead'],
[(True, False), (False, True)])>
>>> print(t)
|alive|dead|
Mr. Praline|X | |
parrot | |X |
>>> tuple(t)
(('Mr. Praline', 'parrot'), ('alive', 'dead'), [(True, False), (False, True)])
>>> (t[0], t[1], t[2]) == (t.objects, t.properties, t.bools)
True
>>> t == (t.objects, t.properties, t.bools)
True
>>> t['Mr. Praline', 'alive']
True
>>> t['parrot', 'alive']
False
>>> t.take(['parrot'])
<Triple(['parrot'], ['alive', 'dead'], [(False, True)])>
>>> t.take(properties=['dead'])
<Triple(['Mr. Praline', 'parrot'], ['dead'], [(False,), (True,)])>
>>> t.take(['Brian'], ['alive', 'holy'])
Traceback (most recent call last):
...
KeyError: ['Brian', 'holy']
>>> t.take(['parrot', 'Mr. Praline'], ['alive'], reorder=True)
<Triple(['parrot', 'Mr. Praline'], ['alive'], [(False,), (True,)])>
>>> print(t.transposed())
|Mr. Praline|parrot|
alive|X | |
dead | |X |
>>> print(t.inverted())
|alive|dead|
Mr. Praline| |X |
parrot |X | |
"""
@classmethod
def fromfile(cls, filename, frmat='cxt', encoding=None, **kwargs):
"""Return a new definiton from file source in given format."""
frmat = formats.Format[frmat]
objects, properties, bools = frmat.load(filename, encoding, **kwargs)
return cls(objects, properties, bools)
@classmethod
def _fromargs(cls, _objects, _properties, _pairs):
inst = super(Triple, cls).__new__(cls)
inst._objects = _objects
inst._properties = _properties
inst._pairs = _pairs
return inst
def __init__(self, objects=(), properties=(), bools=()):
self._objects = tools.Unique(objects)
if len(self._objects) != len(objects):
raise ValueError('duplicate objects: %r' % (objects,))
self._properties = tools.Unique(properties)
if len(self._properties) != len(properties):
raise ValueError('duplicate properties: %r' % (properties,))
self._pairs = {(o, p) for o, boo in zip(objects, bools)
for p, b in zip(properties, boo) if b}
def copy(self):
"""Return an independent copy of the instance."""
return self._fromargs(self._objects.copy(), self._properties.copy(),
self._pairs.copy())
def __iter__(self):
"""Yield ``objects``, ``properties``, and ``bools`` (e.g. for triple unpacking)."""
yield self.objects
yield self.properties
yield self.bools
def __getitem__(self, pair):
if isinstance(pair, int):
return list(self)[pair]
o, p = pair
if o not in self._objects or p not in self._properties:
raise KeyError(pair)
return pair in self._pairs
def __eq__(self, other):
if isinstance(other, Triple): # order insensitive
return (self._objects == other._objects and
self._properties == other._properties and
self._pairs == other._pairs)
return (self.objects, self.properties, self.bools) == other
def __ne__(self, other):
return not self == other
@property
def objects(self):
"""(Names of the) objects described by the definition."""
return tuple(self._objects)
@property
def properties(self):
"""(Names of the) properties that describe the objects."""
return tuple(self._properties)
@property
def bools(self):
"""Row-major list of boolean tuples."""
prop = self._properties
pairs = self._pairs
return [tuple((o, p) in pairs for p in prop)
for o in self._objects]
def __str__(self):
return self.tostring(escape=True)
def __unicode__(self):
return self.tostring()
def __repr__(self):
return '<%s(%r, %r, %r)>' % (self.__class__.__name__,
self._objects._items, self._properties._items, self.bools)
def tostring(self, frmat='table', **kwargs):
"""Return the definition serialized in the given string-based format."""
return formats.Format[frmat].dumps(*self, **kwargs)
def crc32(self, encoding='utf-8'):
"""Return hex-encoded unsigned CRC32 over encoded definition table string."""
return tools.crc32_hex(self.tostring().encode(encoding))
def take(self, objects=None, properties=None, reorder=False):
"""Return a subset with given objects/properties as new definition."""
if (objects and not self._objects.issuperset(objects) or
properties and not self._properties.issuperset(properties)):
notfound = (self._objects.rsub(objects or ()) |
self._properties.rsub(properties or ()))
raise KeyError(list(notfound))
if reorder:
obj = tools.Unique(objects) if objects is not None else self._objects.copy()
prop = tools.Unique(properties) if properties is not None else self._properties.copy()
else:
obj = self._objects.copy()
prop = self._properties.copy()
if objects is not None:
obj &= objects
if properties is not None:
prop &= properties
pairs = self._pairs
_pairs = {(o, p) for o in obj for p in prop if (o, p) in pairs}
return self._fromargs(obj, prop, _pairs)
def transposed(self):
"""Return a new definition swapping ``objects`` and ``properties``."""
_pairs = {(p, o) for (o, p) in self._pairs}
return self._fromargs(self._properties.copy(), self._objects.copy(), _pairs)
def inverted(self):
"""Return a new definition flipping all booleans."""
pairs = self._pairs
_pairs = {(o, p) for o in self._objects for p in self._properties
if (o, p) not in pairs}
return self._fromargs(self._objects.copy(), self._properties.copy(), _pairs)
__neg__ = transposed
__invert__ = inverted
def conflicting_pairs(left, right):
    """Yield all ``(object, property)`` pairs where the two definitions disagree."""
    shared_objects = left._objects & right._objects
    shared_properties = left._properties & right._properties
    # A pair disagrees when it is present in exactly one of the two definitions.
    disagreement = left._pairs ^ right._pairs
    for obj in shared_objects:
        for prop in shared_properties:
            pair = (obj, prop)
            if pair in disagreement:
                yield pair
class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
    """Replace the name of an object by a new one."""
    self._objects.replace(old, new)
    pairs = self._pairs
    # Re-key every incidence pair of the renamed object.
    renamed = set()
    for prop in self._properties:
        if (old, prop) in pairs:
            pairs.remove((old, prop))
            renamed.add((new, prop))
    pairs |= renamed
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.rename_object | python | def rename_object(self, old, new):
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))} | Replace the name of an object by a new one. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L317-L322 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_property(self, old, new):
    """Replace the name of a property by a new one."""
    self._properties.replace(old, new)
    pairs = self._pairs
    # Re-key every incidence pair of the renamed property.
    renamed = set()
    for obj in self._objects:
        if (obj, old) in pairs:
            pairs.remove((obj, old))
            renamed.add((obj, new))
    pairs |= renamed
def move_object(self, obj, index):
    """Reorder the definition such that object is at ``index``."""
    # Ordering is delegated to the underlying container's ``move``.
    self._objects.move(obj, index)
def move_property(self, prop, index):
    """Reorder the definition such that property is at ``index``."""
    # Ordering is delegated to the underlying container's ``move``.
    self._properties.move(prop, index)
def __setitem__(self, pair, value):
    """Set the boolean for an ``(object, property)`` pair, adding both names."""
    if isinstance(pair, int):
        raise ValueError("can't set item")
    obj, prop = pair
    self._objects.add(obj)
    self._properties.add(prop)
    update = self._pairs.add if value else self._pairs.discard
    update(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.rename_property | python | def rename_property(self, old, new):
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))} | Replace the name of a property by a new one. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L324-L329 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.add_object | python | def add_object(self, obj, properties=()):
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties) | Add an object to the definition and add ``properties`` as related. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L350-L354 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.add_property | python | def add_property(self, prop, objects=()):
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects) | Add a property to the definition and add ``objects`` as related. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L356-L360 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.remove_object | python | def remove_object(self, obj):
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties) | Remove an object from the definition. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L362-L365 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_property(self, prop):
"""Remove a property from the definition."""
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
xflr6/concepts | concepts/definitions.py | Definition.remove_property | python | def remove_property(self, prop):
self._properties.remove(prop)
self._pairs.difference_update((o, prop) for o in self._objects) | Remove a property from the definition. | train | https://github.com/xflr6/concepts/blob/2801b27b05fa02cccee7d549451810ffcbf5c942/concepts/definitions.py#L367-L370 | null | class Definition(Triple):
"""Mutable triple of ``(objects, properties, bools)`` for creating a context.
Example:
>>> Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])
<Definition(['man', 'woman'], ['male', 'female'], [(True, False), (False, True)])>
Usage:
>>> d = Definition()
>>> d
<Definition([], [], [])>
>>> d.add_object('King Arthur')
>>> print(d)
|
King Arthur|
>>> d.add_object('Sir Robin', ['human', 'knight'])
>>> d.add_object('holy grail')
>>> print(d)
|human|knight|
King Arthur| | |
Sir Robin |X |X |
holy grail | | |
>>> d.add_object('King Arthur', ['human', 'knight', 'king'])
>>> d.add_property('mysterious', ['holy grail', 'Sir Robin'])
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | |X |
holy grail | | | |X |
>>> d['Sir Robin', 'mysterious'] = False
>>> print(d)
|human|knight|king|mysterious|
King Arthur|X |X |X | |
Sir Robin |X |X | | |
holy grail | | | |X |
>>> e = d.copy()
>>> e.move_object('holy grail', 0)
>>> e.move_property('mysterious', 0)
>>> e.move_property('king', 1)
>>> print(e)
|mysterious|king|human|knight|
holy grail |X | | | |
King Arthur| |X |X |X |
Sir Robin | | |X |X |
>>> e = d.copy()
>>> e.rename_object('Sir Robin', 'Launcelot')
>>> e.add_property('brave', ['Launcelot'])
>>> e.rename_object('holy grail', 'grail')
>>> e.rename_property('mysterious', 'holy')
>>> print(e)
|human|knight|king|holy|brave|
King Arthur|X |X |X | | |
Launcelot |X |X | | |X |
grail | | | |X | |
>>> print(e | d)
|human|knight|king|holy|brave|mysterious|
King Arthur|X |X |X | | | |
Launcelot |X |X | | |X | |
grail | | | |X | | |
Sir Robin |X |X | | | | |
holy grail | | | | | |X |
>>> print(e & d)
|human|knight|king|
King Arthur|X |X |X |
>>> e.remove_object('grail')
>>> e.remove_property('holy')
>>> e.rename_object('King Arthur', 'Arthur')
>>> e.set_property('king', [])
>>> e.set_object('Launcelot', ['human'])
>>> print(e)
|human|knight|king|brave|
Arthur |X |X | | |
Launcelot|X | | | |
>>> e.set_property('knight', ['Launcelot'])
>>> print(e)
|human|knight|king|brave|
Arthur |X | | | |
Launcelot|X |X | | |
"""
def rename_object(self, old, new):
"""Replace the name of an object by a new one."""
self._objects.replace(old, new)
pairs = self._pairs
pairs |= {(new, p) for p in self._properties
if (old, p) in pairs and not pairs.remove((old, p))}
def rename_property(self, old, new):
"""Replace the name of a property by a new one."""
self._properties.replace(old, new)
pairs = self._pairs
pairs |= {(o, new) for o in self._objects
if (o, old) in pairs and not pairs.remove((o, old))}
def move_object(self, obj, index):
"""Reorder the definition such that object is at ``index``."""
self._objects.move(obj, index)
def move_property(self, prop, index):
"""Reorder the definition such that property is at ``index``."""
self._properties.move(prop, index)
def __setitem__(self, pair, value):
if isinstance(pair, int):
raise ValueError("can't set item")
o, p = pair
self._objects.add(o)
self._properties.add(p)
if value:
self._pairs.add(pair)
else:
self._pairs.discard(pair)
def add_object(self, obj, properties=()):
"""Add an object to the definition and add ``properties`` as related."""
self._objects.add(obj)
self._properties |= properties
self._pairs.update((obj, p) for p in properties)
def add_property(self, prop, objects=()):
"""Add a property to the definition and add ``objects`` as related."""
self._properties.add(prop)
self._objects |= objects
self._pairs.update((o, prop) for o in objects)
def remove_object(self, obj):
"""Remove an object from the definition."""
self._objects.remove(obj)
self._pairs.difference_update((obj, p) for p in self._properties)
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
pairs.add((obj, p))
else:
pairs.discard((obj, p))
def set_property(self, prop, objects):
"""Add a property to the definition and set its ``objects``."""
self._properties.add(prop)
objects = set(objects)
self._objects |= objects
pairs = self._pairs
for o in self._objects:
if o in objects:
pairs.add((o, prop))
else:
pairs.discard((o, prop))
def union_update(self, other, ignore_conflicts=False):
"""Update the definition with the union of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects |= other._objects
self._properties |= other._properties
self._pairs |= other._pairs
def intersection_update(self, other, ignore_conflicts=False):
"""Update the definition with the intersection of the ``other``."""
if not ignore_conflicts:
ensure_compatible(self, other)
self._objects &= other._objects
self._properties &= other._properties
self._pairs &= other._pairs
def __ior__(self, other):
self.union_update(other)
return self
def __iand__(self, other):
self.intersection_update(other)
return self
def union(self, other, ignore_conflicts=False):
"""Return a new definition from the union of the definitions."""
result = self.copy()
result.union_update(other, ignore_conflicts)
return result
def intersection(self, other, ignore_conflicts=False):
"""Return a new definition from the intersection of the definitions."""
result = self.copy()
result.intersection_update(other, ignore_conflicts)
return result
__or__ = union
__and__ = intersection
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.