| index | package | name | docstring | code | signature |
|---|---|---|---|---|---|
723,296
|
exponent_server_sdk
|
__init__
| null |
def __init__(self, message, response, response_data=None, errors=None):
    self.message = message
    self.response = response
    self.response_data = response_data
    self.errors = errors
    super(PushServerError, self).__init__(self.message)
|
(self, message, response, response_data=None, errors=None)
|
723,297
|
exponent_server_sdk
|
PushTicket
|
Wrapper class for a push notification response.
A successful single push notification:
{'status': 'ok'}
An invalid push token:
{'status': 'error',
'message': '"adsf" is not a registered push notification recipient'}
|
class PushTicket(
        namedtuple('PushTicket',
                   ['push_message', 'status', 'message', 'details', 'id'])):
    """Wrapper class for a push notification response.
    A successful single push notification:
        {'status': 'ok'}
    An invalid push token:
        {'status': 'error',
         'message': '"adsf" is not a registered push notification recipient'}
    """
    # Known status codes
    ERROR_STATUS = 'error'
    SUCCESS_STATUS = 'ok'
    # Known error strings
    ERROR_DEVICE_NOT_REGISTERED = 'DeviceNotRegistered'
    ERROR_MESSAGE_TOO_BIG = 'MessageTooBig'
    ERROR_MESSAGE_RATE_EXCEEDED = 'MessageRateExceeded'

    def is_success(self):
        """Returns True if this push notification was successfully sent."""
        return self.status == PushTicket.SUCCESS_STATUS

    def validate_response(self):
        """Raises an exception if there was an error. Otherwise, does nothing.
        Clients should handle these errors, since they require custom handling
        to properly resolve.
        """
        if self.is_success():
            return
        # Handle the error if we have any information
        if self.details:
            error = self.details.get('error', None)
            if error == PushTicket.ERROR_DEVICE_NOT_REGISTERED:
                raise DeviceNotRegisteredError(self)
            elif error == PushTicket.ERROR_MESSAGE_TOO_BIG:
                raise MessageTooBigError(self)
            elif error == PushTicket.ERROR_MESSAGE_RATE_EXCEEDED:
                raise MessageRateExceededError(self)
        # No known error information, so let's raise a generic error.
        raise PushTicketError(self)
|
(push_message, status, message, details, id)
|
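The intended flow for the class above: send a message, get back a ``PushTicket``, then call ``validate_response()`` and branch on the specific error. A minimal sketch, assuming ``PushClient``, ``PushMessage``, and the error subclasses live in ``exponent_server_sdk`` alongside ``PushTicket``; ``deactivate_token`` is a hypothetical application hook.

```python
import logging

# Assumed imports: only PushTicket and its errors appear in this section;
# PushClient and PushMessage follow the exponent_server_sdk naming.
from exponent_server_sdk import (
    DeviceNotRegisteredError,
    MessageRateExceededError,
    PushClient,
    PushMessage,
    PushTicketError,
)

log = logging.getLogger(__name__)


def deactivate_token(token):
    """Hypothetical hook: stop sending to a dead push token."""


def notify(token, body):
    # publish() is assumed to return a PushTicket like the one above.
    ticket = PushClient().publish(PushMessage(to=token, body=body))
    try:
        ticket.validate_response()  # raises unless status == 'ok'
    except DeviceNotRegisteredError:
        deactivate_token(token)
    except MessageRateExceededError:
        log.info('rate exceeded for %s; retry later', token)
    except PushTicketError as exc:
        log.warning('push failed: %s', exc.message)
```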
723,299
|
namedtuple_PushTicket
|
__new__
|
Create new instance of PushTicket(push_message, status, message, details, id)
|
from builtins import function
|
(_cls, push_message, status, message, details, id)
|
723,302
|
collections
|
_replace
|
Return a new PushTicket object replacing specified fields with new values
|
def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
    """Returns a new subclass of tuple with named fields.

    >>> Point = namedtuple('Point', ['x', 'y'])
    >>> Point.__doc__ # docstring for the new class
    'Point(x, y)'
    >>> p = Point(11, y=22) # instantiate with positional args or keywords
    >>> p[0] + p[1] # indexable like a plain tuple
    33
    >>> x, y = p # unpack like a regular tuple
    >>> x, y
    (11, 22)
    >>> p.x + p.y # fields also accessible by name
    33
    >>> d = p._asdict() # convert to a dictionary
    >>> d['x']
    11
    >>> Point(**d) # convert from a dictionary
    Point(x=11, y=22)
    >>> p._replace(x=100) # _replace() is like str.replace() but targets named fields
    Point(x=100, y=22)
    """
    # Validate the field names. At the user's option, either generate an error
    # message or automatically replace the field name with a valid name.
    if isinstance(field_names, str):
        field_names = field_names.replace(',', ' ').split()
    field_names = list(map(str, field_names))
    typename = _sys.intern(str(typename))

    if rename:
        seen = set()
        for index, name in enumerate(field_names):
            if (not name.isidentifier()
                or _iskeyword(name)
                or name.startswith('_')
                or name in seen):
                field_names[index] = f'_{index}'
            seen.add(name)

    for name in [typename] + field_names:
        if type(name) is not str:
            raise TypeError('Type names and field names must be strings')
        if not name.isidentifier():
            raise ValueError('Type names and field names must be valid '
                             f'identifiers: {name!r}')
        if _iskeyword(name):
            raise ValueError('Type names and field names cannot be a '
                             f'keyword: {name!r}')

    seen = set()
    for name in field_names:
        if name.startswith('_') and not rename:
            raise ValueError('Field names cannot start with an underscore: '
                             f'{name!r}')
        if name in seen:
            raise ValueError(f'Encountered duplicate field name: {name!r}')
        seen.add(name)

    field_defaults = {}
    if defaults is not None:
        defaults = tuple(defaults)
        if len(defaults) > len(field_names):
            raise TypeError('Got more default values than field names')
        field_defaults = dict(reversed(list(zip(reversed(field_names),
                                                reversed(defaults)))))

    # Variables used in the methods and docstrings
    field_names = tuple(map(_sys.intern, field_names))
    num_fields = len(field_names)
    arg_list = ', '.join(field_names)
    if num_fields == 1:
        arg_list += ','
    repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
    tuple_new = tuple.__new__
    _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip

    # Create all the named tuple methods to be added to the class namespace
    namespace = {
        '_tuple_new': tuple_new,
        '__builtins__': {},
        '__name__': f'namedtuple_{typename}',
    }
    code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
    __new__ = eval(code, namespace)
    __new__.__name__ = '__new__'
    __new__.__doc__ = f'Create new instance of {typename}({arg_list})'
    if defaults is not None:
        __new__.__defaults__ = defaults

    @classmethod
    def _make(cls, iterable):
        result = tuple_new(cls, iterable)
        if _len(result) != num_fields:
            raise TypeError(f'Expected {num_fields} arguments, got {len(result)}')
        return result

    _make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
                              'or iterable')

    def _replace(self, /, **kwds):
        result = self._make(_map(kwds.pop, field_names, self))
        if kwds:
            raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
        return result

    _replace.__doc__ = (f'Return a new {typename} object replacing specified '
                        'fields with new values')

    def __repr__(self):
        'Return a nicely formatted representation string'
        return self.__class__.__name__ + repr_fmt % self

    def _asdict(self):
        'Return a new dict which maps field names to their values.'
        return _dict(_zip(self._fields, self))

    def __getnewargs__(self):
        'Return self as a plain tuple. Used by copy and pickle.'
        return _tuple(self)

    # Modify function metadata to help with introspection and debugging
    for method in (
        __new__,
        _make.__func__,
        _replace,
        __repr__,
        _asdict,
        __getnewargs__,
    ):
        method.__qualname__ = f'{typename}.{method.__name__}'

    # Build-up the class namespace dictionary
    # and use type() to build the result class
    class_namespace = {
        '__doc__': f'{typename}({arg_list})',
        '__slots__': (),
        '_fields': field_names,
        '_field_defaults': field_defaults,
        '__new__': __new__,
        '_make': _make,
        '_replace': _replace,
        '__repr__': __repr__,
        '_asdict': _asdict,
        '__getnewargs__': __getnewargs__,
        '__match_args__': field_names,
    }
    for index, name in enumerate(field_names):
        doc = _sys.intern(f'Alias for field number {index}')
        class_namespace[name] = _tuplegetter(index, doc)

    result = type(typename, (tuple,), class_namespace)

    # For pickling to work, the __module__ variable needs to be set to the frame
    # where the named tuple is created. Bypass this step in environments where
    # sys._getframe is not defined (Jython for example) or sys._getframe is not
    # defined for arguments greater than 0 (IronPython), or where the user has
    # specified a particular module.
    if module is None:
        try:
            module = _sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass
    if module is not None:
        result.__module__ = module

    return result
|
(self, /, **kwds)
|
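The ``rename`` and ``defaults`` branches validated in the source above are easiest to see in a quick standard-library-only example:

```python
from collections import namedtuple

# defaults bind to the rightmost fields (the zip(reversed(...)) above).
Point3 = namedtuple('Point3', ['x', 'y', 'z'], defaults=[0])
assert Point3(1, 2) == Point3(x=1, y=2, z=0)
assert Point3._field_defaults == {'z': 0}

# rename=True replaces keyword/duplicate names with _<index>
# instead of raising ValueError.
Row = namedtuple('Row', ['id', 'class', 'id'], rename=True)
assert Row._fields == ('id', '_1', '_2')

# _replace(), whose docstring this row records, returns a new tuple.
assert Point3(1, 2)._replace(z=9) == Point3(1, 2, 9)
```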
723,303
|
exponent_server_sdk
|
is_success
|
Returns True if this push notification was successfully sent.
|
def is_success(self):
    """Returns True if this push notification was successfully sent."""
    return self.status == PushTicket.SUCCESS_STATUS
|
(self)
|
723,304
|
exponent_server_sdk
|
validate_response
|
Raises an exception if there was an error. Otherwise, does nothing.
Clients should handle these errors, since they require custom handling
to properly resolve.
|
def validate_response(self):
    """Raises an exception if there was an error. Otherwise, does nothing.
    Clients should handle these errors, since they require custom handling
    to properly resolve.
    """
    if self.is_success():
        return
    # Handle the error if we have any information
    if self.details:
        error = self.details.get('error', None)
        if error == PushTicket.ERROR_DEVICE_NOT_REGISTERED:
            raise DeviceNotRegisteredError(self)
        elif error == PushTicket.ERROR_MESSAGE_TOO_BIG:
            raise MessageTooBigError(self)
        elif error == PushTicket.ERROR_MESSAGE_RATE_EXCEEDED:
            raise MessageRateExceededError(self)
    # No known error information, so let's raise a generic error.
    raise PushTicketError(self)
|
(self)
|
723,305
|
exponent_server_sdk
|
PushTicketError
|
Base class for all push ticket errors
|
class PushTicketError(Exception):
    """Base class for all push ticket errors"""

    def __init__(self, push_response):
        if push_response.message:
            self.message = push_response.message
        else:
            self.message = 'Unknown push ticket error'
        super(PushTicketError, self).__init__(self.message)
        self.push_response = push_response
|
(push_response)
|
723,313
|
deform.form
|
Button
|
A class representing a form submit button. A sequence of
:class:`deform.widget.Button` objects may be passed to the
constructor of a :class:`deform.form.Form` class when it is
created to represent the buttons rendered at the bottom of the
form.
Arguments:
name
The string or unicode value used as the ``name`` of the button
when rendered (the ``name`` attribute of the button or input
tag resulting from a form rendering). Default: ``submit``.
title
The value used as the title of the button when rendered (shows
up in the button inner text). Default: capitalization of
whatever is passed as ``name``. E.g. if ``name`` is passed as
``submit``, ``title`` will be ``Submit``.
type
The value used as the type of button. The HTML spec supports
``submit``, ``reset`` and ``button``. A special value of
``link`` will create a regular HTML link that's styled to look
like a button. Default: ``submit``.
value
The value used as the value of the button when rendered (the
``value`` attribute of the button or input tag resulting from
a form rendering). If the button ``type`` is ``link`` then
this setting is used as the URL for the link button.
Default: same as ``name`` passed.
icon
glyph icon name to include as part of button. (Ex. If you
wanted to add the glyphicon-plus to this button then you'd pass
in a value of ``plus``) Default: ``None`` (no icon is added)
disabled
Render the button as disabled if True.
css_class
The name of a CSS class to attach to the button. In the default
form rendering, this string will replace the default button type
(either ``btn-primary`` or ``btn-default``) on the ``class``
attribute of the button. For example, if ``css_class`` was
``btn-danger`` then the resulting default class becomes
``btn btn-danger``. Default: ``None`` (use default class).
attributes
HTML5 attributes passed in as a dictionary. This is especially
useful for a Cancel button where you do not want the client to
validate the form inputs, for example
``attributes={"formnovalidate": "formnovalidate"}``.
|
class Button(object):
    """
    A class representing a form submit button. A sequence of
    :class:`deform.widget.Button` objects may be passed to the
    constructor of a :class:`deform.form.Form` class when it is
    created to represent the buttons rendered at the bottom of the
    form.

    Arguments:

    name
        The string or unicode value used as the ``name`` of the button
        when rendered (the ``name`` attribute of the button or input
        tag resulting from a form rendering). Default: ``submit``.

    title
        The value used as the title of the button when rendered (shows
        up in the button inner text). Default: capitalization of
        whatever is passed as ``name``. E.g. if ``name`` is passed as
        ``submit``, ``title`` will be ``Submit``.

    type
        The value used as the type of button. The HTML spec supports
        ``submit``, ``reset`` and ``button``. A special value of
        ``link`` will create a regular HTML link that's styled to look
        like a button. Default: ``submit``.

    value
        The value used as the value of the button when rendered (the
        ``value`` attribute of the button or input tag resulting from
        a form rendering). If the button ``type`` is ``link`` then
        this setting is used as the URL for the link button.
        Default: same as ``name`` passed.

    icon
        glyph icon name to include as part of button. (Ex. If you
        wanted to add the glyphicon-plus to this button then you'd pass
        in a value of ``plus``) Default: ``None`` (no icon is added)

    disabled
        Render the button as disabled if True.

    css_class
        The name of a CSS class to attach to the button. In the default
        form rendering, this string will replace the default button type
        (either ``btn-primary`` or ``btn-default``) on the ``class``
        attribute of the button. For example, if ``css_class`` was
        ``btn-danger`` then the resulting default class becomes
        ``btn btn-danger``. Default: ``None`` (use default class).

    attributes
        HTML5 attributes passed in as a dictionary. This is especially
        useful for a Cancel button where you do not want the client to
        validate the form inputs, for example
        ``attributes={"formnovalidate": "formnovalidate"}``.
    """

    def __init__(
        self,
        name="submit",
        title=None,
        type="submit",  # noQA
        value=None,
        disabled=False,
        css_class=None,
        icon=None,
        attributes=None,
    ):
        if attributes is None:
            attributes = {}
        if title is None:
            title = name.capitalize()
        name = re.sub(r"\s", "_", name)
        if value is None and type != "link":
            value = name
        self.name = name
        self.title = title
        self.type = type  # noQA
        self.value = value
        self.disabled = disabled
        self.css_class = css_class
        self.icon = icon
        self.attributes = attributes
|
(name='submit', title=None, type='submit', value=None, disabled=False, css_class=None, icon=None, attributes=None)
|
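A sketch of how these options combine when constructing a form. ``Form`` and ``Button`` are the deform classes documented here; the ``LoginSchema`` node is a stand-in.

```python
import colander
from deform import Button, Form


class LoginSchema(colander.MappingSchema):
    username = colander.SchemaNode(colander.String())


form = Form(
    LoginSchema(),
    buttons=(
        Button(name='log in'),  # title becomes 'Log in', name 'log_in'
        Button(name='cancel',   # skip client-side validation on cancel
               attributes={'formnovalidate': 'formnovalidate'}),
        Button(name='home', type='link', value='/',  # value is the URL
               css_class='btn-danger'),
    ),
)
```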
723,314
|
deform.form
|
__init__
| null |
def __init__(
    self,
    name="submit",
    title=None,
    type="submit",  # noQA
    value=None,
    disabled=False,
    css_class=None,
    icon=None,
    attributes=None,
):
    if attributes is None:
        attributes = {}
    if title is None:
        title = name.capitalize()
    name = re.sub(r"\s", "_", name)
    if value is None and type != "link":
        value = name
    self.name = name
    self.title = title
    self.type = type  # noQA
    self.value = value
    self.disabled = disabled
    self.css_class = css_class
    self.icon = icon
    self.attributes = attributes
|
(self, name='submit', title=None, type='submit', value=None, disabled=False, css_class=None, icon=None, attributes=None)
|
723,315
|
deform.field
|
Field
|
Represents an individual form field (a visible object in a
form rendering).
A :class:`deform.form.Field` object instance is meant to last for
the duration of a single web request. As a result, a field object
is often used as a scratchpad by the widget associated with that
field. Using a field as a scratchpad makes it possible to build
implementations of state-retaining widgets while instances of
those widgets still only need to be constructed once instead of on
each request.
*Attributes*
schema
The schema node associated with this field.
widget
The widget associated with this field. When no widget is
defined in the schema node, a default widget will be created.
The default widget will have a generated item_css_class
containing the normalized version of the ``name`` attribute
(with ``item`` prepended, e.g. ``item-username``).
NOTE: This behaviour is deprecated and will be removed in
the future. Mapping and Sequence Widget templates simply
render a css class on an item's container based on Field
information.
order
An integer indicating the relative order of this field's
construction to its children and parents.
oid
A string incorporating the ``order`` attribute that can be
used as a unique identifier in HTML code (often for ``id``
attributes of field-related elements). A default oid is
generated that looks like this: ``deformField0``. A
custom oid can be provided, but if the field is cloned,
the clones will get unique default oids.
name
An alias for self.schema.name
title
An alias for self.schema.title
description
An alias for self.schema.description
required
An alias for self.schema.required
typ
An alias for self.schema.typ
children
Child fields of this field.
parent
The parent field of this field or ``None`` if this field is
the root. This is actually a property that returns the result
of ``weakref.ref(actualparent)()`` to avoid leaks due to circular
references, but it can be treated like the field itself.
error
The exception raised by the last attempted validation of the
schema element associated with this field. By default, this
attribute is ``None``. If non-None, this attribute is usually
an instance of the exception class
:exc:`colander.Invalid`, which has a ``msg`` attribute
providing a human-readable validation error message.
errormsg
The ``msg`` attribute of the ``error`` attached to this field
or ``None`` if the ``error`` attached to this field is ``None``.
renderer
The template :term:`renderer` associated with the form. If a
renderer is not passed to the constructor, the default deform
renderer will be used (the :term:`default renderer`).
counter
``None`` or an instance of ``itertools.count`` which is used
to generate sequential order-related attributes such as
``oid`` and ``order``.
resource_registry
The :term:`resource registry` associated with this field.
autofocus
If the field's parent form has its ``focus`` argument set to
``on``, the first field out of all fields in this form with
``autofocus`` set to a true-ish value (``on``, ``True``, or
``autofocus``) will receive focus on page load. Default: ``None``.
*Constructor Arguments*
``renderer``, ``counter``, ``resource_registry`` and ``appstruct`` are
accepted as explicit keyword arguments to the :class:`deform.Field`.
These are also available as attribute values. ``renderer``, if passed,
is a template renderer as described in :ref:`creating_a_renderer`.
``counter``, if passed, should be an :attr:`itertools.count` object
(useful when rendering multiple forms on the same page; see
https://deformdemo.pylonsproject.org/multiple_forms/).
``resource_registry``, if passed, should be a widget resource registry
(see also :ref:`get_widget_resources`).
If any of these values is not passed, a suitable default value is used
in its place.
The ``appstruct`` constructor argument is used to prepopulate field
values related to this form's schema. If an appstruct is not supplied,
the form's fields will be rendered with default values unless an
appstruct is supplied to the ``render`` method explicitly.
The :class:`deform.Field` constructor also accepts *arbitrary*
keyword arguments. When an 'unknown' keyword argument is
passed, it is attached unmodified to the form field as an
attribute.
All keyword arguments (explicit and unknown) are also attached to
all *children* nodes of the field being constructed.
|
class Field(object):
"""Represents an individual form field (a visible object in a
form rendering).
A :class:`deform.form.Field` object instance is meant to last for
the duration of a single web request. As a result, a field object
is often used as a scratchpad by the widget associated with that
field. Using a field as a scratchpad makes it possible to build
implementations of state-retaining widgets while instances of
those widgets still only need to be constructed once instead of on
each request.
*Attributes*
schema
The schema node associated with this field.
widget
The widget associated with this field. When no widget is
defined in the schema node, a default widget will be created.
The default widget will have a generated item_css_class
containing the normalized version of the ``name`` attribute
(with ``item`` prepended, e.g. ``item-username``).
NOTE: This behaviour is deprecated and will be removed in
the future. Mapping and Sequence Widget templates simply
render a css class on an item's container based on Field
information.
order
An integer indicating the relative order of this field's
construction to its children and parents.
oid
A string incorporating the ``order`` attribute that can be
used as a unique identifier in HTML code (often for ``id``
attributes of field-related elements). A default oid is
generated that looks like this: ``deformField0``. A
custom oid can be provided, but if the field is cloned,
the clones will get unique default oids.
name
An alias for self.schema.name
title
An alias for self.schema.title
description
An alias for self.schema.description
required
An alias for self.schema.required
typ
An alias for self.schema.typ
children
Child fields of this field.
parent
The parent field of this field or ``None`` if this field is
the root. This is actually a property that returns the result
of ``weakref.ref(actualparent)()`` to avoid leaks due to circular
references, but it can be treated like the field itself.
error
The exception raised by the last attempted validation of the
schema element associated with this field. By default, this
attribute is ``None``. If non-None, this attribute is usually
an instance of the exception class
:exc:`colander.Invalid`, which has a ``msg`` attribute
providing a human-readable validation error message.
errormsg
The ``msg`` attribute of the ``error`` attached to this field
or ``None`` if the ``error`` attached to this field is ``None``.
renderer
The template :term:`renderer` associated with the form. If a
renderer is not passed to the constructor, the default deform
renderer will be used (the :term:`default renderer`).
counter
``None`` or an instance of ``itertools.count`` which is used
to generate sequential order-related attributes such as
``oid`` and ``order``.
resource_registry
The :term:`resource registry` associated with this field.
autofocus
If the field's parent form has its ``focus`` argument set to
``on``, the first field out of all fields in this form with
``autofocus`` set to a true-ish value (``on``, ``True``, or
``autofocus``) will receive focus on page load. Default: ``None``.
*Constructor Arguments*
``renderer``, ``counter``, ``resource_registry`` and ``appstruct`` are
accepted as explicit keyword arguments to the :class:`deform.Field`.
These are also available as attribute values. ``renderer``, if passed,
is a template renderer as described in :ref:`creating_a_renderer`.
``counter``, if passed, should be an :attr:`itertools.count` object
(useful when rendering multiple forms on the same page; see
https://deformdemo.pylonsproject.org/multiple_forms/).
``resource_registry``, if passed, should be a widget resource registry
(see also :ref:`get_widget_resources`).
If any of these values is not passed, a suitable default value is used
in its place.
The ``appstruct`` constructor argument is used to prepopulate field
values related to this form's schema. If an appstruct is not supplied,
the form's fields will be rendered with default values unless an
appstruct is supplied to the ``render`` method explicitly.
The :class:`deform.Field` constructor also accepts *arbitrary*
keyword arguments. When an 'unknown' keyword argument is
passed, it is attached unmodified to the form field as an
attribute.
All keyword arguments (explicit and unknown) are also attached to
all *children* nodes of the field being constructed.
"""
error = None
_cstruct = colander.null
default_renderer = template.default_renderer
default_resource_registry = widget.default_resource_registry
# Allowable input types for automatic focusing
focusable_input_types = (
type(colander.Boolean()),
type(colander.Date()),
type(colander.DateTime()),
type(colander.Decimal()),
type(colander.Float()),
type(colander.Integer()),
type(colander.Set()),
type(colander.String()),
type(colander.Time()),
)
hidden_type = type(HiddenWidget())
def __init__(
self,
schema,
renderer=None,
counter=None,
resource_registry=None,
appstruct=colander.null,
parent=None,
autofocus=None,
**kw
):
self.counter = counter or itertools.count()
self.order = next(self.counter)
self.oid = getattr(schema, "oid", "deformField%s" % self.order)
self.schema = schema
self.typ = schema.typ # required by Invalid exception
self.name = schema.name
self.title = schema.title
self.description = schema.description
self.required = schema.required
if renderer is None:
renderer = self.default_renderer
if resource_registry is None:
resource_registry = self.default_resource_registry
self.renderer = renderer
# Parameters passed from parent field to child
if "focus" in kw:
focus = kw["focus"]
else:
focus = "on"
if "have_first_input" in kw:
self.have_first_input = kw["have_first_input"]
else:
self.have_first_input = False
if (
focus == "off"
or autofocus is None
or autofocus is False
or str(autofocus).lower() == "off"
):
self.autofocus = None
else:
self.autofocus = "autofocus"
self.resource_registry = resource_registry
self.children = []
if parent is not None:
parent = weakref.ref(parent)
self._parent = parent
self.__dict__.update(kw)
first_input_index = -1
child_count = 0
focused = False
for child in schema.children:
if (
focus == "on"
and not focused
and type(child.typ) in Field.focusable_input_types
and type(child.widget) != Field.hidden_type
and not self.have_first_input
):
first_input_index = child_count
self.found_first() # Notify ancestors
autofocus = getattr(child, "autofocus", None)
if autofocus is not None:
focused = True
kw["have_first_input"] = self.have_first_input
self.children.append(
Field(
child,
renderer=renderer,
counter=self.counter,
resource_registry=resource_registry,
parent=self,
autofocus=autofocus,
**kw
)
)
child_count += 1
if (
focus == "on"
and not focused
and first_input_index != -1
and self.have_first_input
):
# User did not set autofocus. Focus on first valid input.
self.children[first_input_index].autofocus = "autofocus"
self.set_appstruct(appstruct)
def found_first(self):
""" Set have_first_input of ancestors """
self.have_first_input = True
if self.parent is not None:
self.parent.found_first()
@property
def parent(self):
if self._parent is None:
return None
return self._parent()
def get_root(self):
""" Return the root field in the field hierarchy (the form field) """
node = self
while True:
parent = node.parent
if parent is None:
break
node = parent
return node
@classmethod
def set_zpt_renderer(
cls,
search_path,
auto_reload=True,
debug=True,
encoding="utf-8",
translator=None,
):
"""Create a :term:`Chameleon` ZPT renderer that will act as a
:term:`default renderer` for instances of the associated class
when no ``renderer`` argument is provided to the class'
constructor. The arguments to this classmethod have the same
meaning as the arguments provided to a
:class:`deform.ZPTRendererFactory`.
Calling this method resets the :term:`default renderer`.
This method is effectively a shortcut for
``cls.set_default_renderer(ZPTRendererFactory(...))``."""
cls.default_renderer = template.ZPTRendererFactory(
search_path,
auto_reload=auto_reload,
debug=debug,
encoding=encoding,
translator=translator,
)
@classmethod
def set_default_renderer(cls, renderer):
"""Set the callable that will act as a default renderer for
instances of the associated class when no ``renderer``
argument is provided to the class' constructor. Useful when
you'd like to use an alternate templating system.
Calling this method resets the :term:`default renderer`.
"""
cls.default_renderer = staticmethod(renderer)
@classmethod
def set_default_resource_registry(cls, registry):
"""Set the callable that will act as a default
:term:`resource registry` for instances of the associated
class when no ``resource_registry`` argument is provided to
the class' constructor. Useful when you'd like to use
non-default requirement to resource path mappings for the
entirety of a process.
Calling this method resets the default :term:`resource registry`.
"""
cls.default_resource_registry = registry
def translate(self, msgid):
"""Use the translator passed to the renderer of this field to
translate the msgid into a term and return the term. If the renderer
does not have a translator, this method will return the msgid."""
translate = getattr(self.renderer, "translate", None)
if translate is not None:
return translate(msgid)
return msgid
def __iter__(self):
""" Iterate over the children fields of this field. """
return iter(self.children)
def __getitem__(self, name):
"""Return the subfield of this field named ``name`` or raise
a :exc:`KeyError` if a subfield does not exist named ``name``."""
for child in self.children:
if child.name == name:
return child
raise KeyError(name)
def __contains__(self, name):
for child in self.children:
if child.name == name:
return True
return False
def clone(self):
"""Clone the field and its subfields, retaining attribute
information. Return the cloned field. The ``order``
attribute of the node is not cloned; instead the field
receives a new order attribute; it will be a number larger
than the last rendered field of this set. The parent of the cloned
node will become ``None`` unconditionally."""
cloned = self.__class__(self.schema)
cloned.__dict__.update(self.__dict__)
cloned.order = next(cloned.counter)
cloned.oid = "deformField%s" % cloned.order
cloned._parent = None
children = []
for field in self.children:
cloned_child = field.clone()
cloned_child._parent = weakref.ref(cloned)
children.append(cloned_child)
cloned.children = children
return cloned
@decorator.reify
def widget(self):
"""If a widget is not assigned directly to a field, this
function will be called to generate a default widget (only
once). The result of this function will then be assigned as
the ``widget`` attribute of the field for the rest of the
lifetime of this field. If a widget is assigned to a field
before form processing, this function will not be called."""
wdg = getattr(self.schema, "widget", None)
if wdg is not None:
return wdg
widget_maker = getattr(self.schema.typ, "widget_maker", None)
if widget_maker is None:
widget_maker = schema.default_widget_makers.get(
self.schema.typ.__class__
)
if widget_maker is None:
for (cls, wgt) in schema.default_widget_makers.items():
if isinstance(self.schema.typ, cls):
widget_maker = wgt
break
if widget_maker is None:
widget_maker = widget.TextInputWidget
return widget_maker(item_css_class=self.default_item_css_class())
def default_item_css_class(self):
if not self.name:
return None
css_class = (
unicodedata.normalize("NFKD", compat.text_type(self.name))
.encode("ascii", "ignore")
.decode("ascii")
)
css_class = re.sub(r"[^\w\s-]", "", css_class).strip().lower() # noQA
css_class = re.sub(r"[-\s]+", "-", css_class) # noQA
return "item-%s" % css_class
def get_widget_requirements(self):
"""Return a sequence of two tuples in the form
[(``requirement_name``, ``version``), ..].
The first element in each two-tuple represents a requirement
name. When a requirement name is returned as part of
``get_widget_requirements``, it means that one or more CSS or
Javascript resources need to be loaded by the page performing
the form rendering in order for some widget on the page to
function properly.
The second element in each two-tuple is the requested version
of the library resource. It may be ``None``, in which case
the version is unspecified.
See also the ``requirements`` attribute of
:class:`deform.Widget` and the explanation of widget
requirements in :ref:`get_widget_requirements`.
"""
L = []
requirements = [req for req in self.widget.requirements] + [
req
for child in self.children
for req in child.get_widget_requirements()
]
if requirements:
for requirement in requirements:
if isinstance(requirement, dict):
L.append(requirement)
else:
reqt = tuple(requirement)
if reqt not in L:
L.append(reqt)
return L
def get_widget_resources(self, requirements=None):
"""Return a resources dictionary in the form ``{'js':[seq],
'css':[seq]}``. ``js`` represents Javascript resources,
``css`` represents CSS resources. ``seq`` represents a
sequence of resource paths. Each path in ``seq`` represents a
relative resource name, as defined by the mapping of a
requirement to a set of resource specification by the
:term:`resource registry` attached to this field or form.
This method may raise a :exc:`ValueError` if the resource
registry associated with this field or form cannot resolve a
requirement to a set of resource paths.
The ``requirements`` argument represents a set of requirements
as returned by a manual call to
:meth:`deform.Field.get_widget_requirements`. If
``requirements`` is not supplied, the requirements are implied
by calling the :meth:`deform.Field.get_widget_requirements`
method against this form field.
See also :ref:`get_widget_resources`.
"""
if requirements is None:
requirements = self.get_widget_requirements()
resources = self.resource_registry(
(req for req in requirements if not isinstance(req, dict))
)
for req in requirements:
if not isinstance(req, dict):
continue
for key in {'js', 'css'}.intersection(req):
value = req[key]
if isinstance(value, str):
resources[key].append(value)
else:
resources[key].extend(value)
return resources
def set_widgets(self, values, separator="."):
"""set widgets of the child fields of this field
or form element. ``widgets`` should be a dictionary in the
form::
{'dotted.field.name':Widget(),
'dotted.field.name2':Widget()}
The keys of the dictionary are dotted names. Each dotted name
refers to a single field in the tree of fields that are
children of the field or form object upon which this method is
called.
The dotted name is split on its dots and the resulting list of
names is used as a search path into the child fields of this
field in order to find a field to which to assign the
associated widget.
Two special cases exist:
- If the key is the empty string (``''``), the widget is
assigned to the field upon which this method is called.
- If the key contains an asterisk as an element name, the
first child of the found element is traversed. This is most
useful for sequence fields, because the first (and only)
child of sequence fields is always the prototype field which
is used to render all fields in the sequence within a form
rendering.
If the ``separator`` argument is passed, it should be a
string to be used as the dot character when splitting the
dotted names (useful if one of your field objects
has a dot in its name and you need to use a different
separator).
Examples follow. If the following form is used::
class Person(Schema):
first_name = SchemaNode(String())
last_name = SchemaNode(String())
class People(SequenceSchema):
person = Person()
class Conference(Schema):
people = People()
name = SchemaNode(String())
schema = Conference()
form = Form(schema)
The following invocations will have the following results
against the schema defined above:
``form.set_widgets({'people.person.first_name':TextAreaWidget()})``
Set the ``first_name`` field's widget to a ``TextAreaWidget``.
``form.set_widgets({'people.*.first_name':TextAreaWidget()})``
Set the ``first_name`` field's widget to a
``TextAreaWidget``.
``form.set_widgets({'people':MySequenceWidget()})``
Set the ``people`` sequence field's widget to a
``MySequenceWidget``.
``form.set_widgets({'people.*':MySequenceWidget()})``
Set the *person* field's widget to a ``MySequenceWidget``.
``form.set_widgets({'':MyMappingWidget()})``
Set *form* node's widget to a ``MyMappingWidget``.
"""
for k, v in values.items():
if not k:
self.widget = v
else:
path = k.split(separator)
field = self
while path:
element = path.pop(0)
if element == "*":
field = field.children[0]
else:
field = field[element]
field.widget = v
@property
def errormsg(self):
"""Return the ``msg`` attribute of the ``error`` attached to
this field. If the ``error`` attribute is ``None``,
the return value will be ``None``."""
return getattr(self.error, "msg", None)
def serialize(self, cstruct=_marker, **kw):
"""Serialize the cstruct into HTML and return the HTML string. This
function just turns around and calls ``self.widget.serialize(**kw)``;
therefore the field widget's ``serialize`` method should be expecting
any values sent in ``kw``. If ``cstruct`` is not passed, the cstruct
attached to this node will be injected into ``kw`` as ``cstruct``.
If ``field`` is not passed in ``kw``, this field will be injected
into ``kw`` as ``field``.
.. note::
Deform versions before 0.9.8 only accepted a ``readonly``
keyword argument to this function. Version 0.9.8 and later accept
arbitrary keyword arguments. It also required that
``cstruct`` was passed; it's broken out from
``kw`` in the method signature for backwards compatibility.
"""
if cstruct is _marker:
cstruct = self.cstruct
values = {"field": self, "cstruct": cstruct}
values.update(kw)
return self.widget.serialize(**values)
def deserialize(self, pstruct):
""" Deserialize the pstruct into a cstruct and return the cstruct."""
return self.widget.deserialize(self, pstruct)
def render(self, appstruct=_marker, **kw):
"""Render the field (or form) to HTML using ``appstruct`` as a set
of default values and return the HTML string. ``appstruct`` is
typically a dictionary of application values matching the schema used
by this form, or ``colander.null`` to render all defaults. If it
is omitted, the rendering will use the ``appstruct`` passed to the
constructor.
Calling this method passing an appstruct is the same as calling::
cstruct = form.set_appstruct(appstruct)
form.serialize(cstruct, **kw)
Calling this method without passing an appstruct is the same as
calling::
cstruct = form.cstruct
form.serialize(cstruct, **kw)
See the documentation for
:meth:`colander.SchemaNode.serialize` and
:meth:`deform.widget.Widget.serialize` .
.. note::
Deform versions before 0.9.8 only accepted a ``readonly``
keyword argument to this function. Version 0.9.8 and later accept
arbitrary keyword arguments.
"""
if appstruct is not _marker:
self.set_appstruct(appstruct)
cstruct = self.cstruct
kw.pop("cstruct", None) # disallowed
html = self.serialize(cstruct, **kw)
return html
def validate(self, controls, subcontrol=None):
"""
Validate the set of controls returned by a form submission
against the schema associated with this field or form.
``controls`` should be a *document-ordered* sequence of
two-tuples that represent the form submission data. Each
two-tuple should be in the form ``(key, value)``. ``node``
should be the schema node associated with this widget.
For example, using WebOb, you can compute a suitable value for
``controls`` via::
request.POST.items()
Or, if you're using a ``cgi.FieldStorage`` object named
``fs``, you can compute a suitable value for ``controls``
via::
controls = []
if fs.list:
for control in fs.list:
if control.filename:
controls.append((control.name, control))
else:
controls.append((control.name, control.value))
Equivalent ways of computing ``controls`` should be available in
any web framework.
When the ``validate`` method is called:
- if the fields are successfully validated, a data structure
represented by the deserialization of the data as per the
schema is returned. It will be a mapping.
- If the fields cannot be successfully validated, a
:exc:`deform.exception.ValidationFailure` exception is raised.
The typical usage of ``validate`` in the wild is often
something like this (at least in terms of code found within
the body of a :mod:`pyramid` view function, the particulars
will differ in your web framework)::
from webob.exc import HTTPFound
from deform.exception import ValidationFailure
from deform import Form
import colander
from my_application import do_something
class MySchema(colander.MappingSchema):
color = colander.SchemaNode(colander.String())
schema = MySchema()
def view(request):
form = Form(schema, buttons=('submit',))
if 'submit' in request.POST: # form submission needs validation
controls = request.POST.items()
try:
deserialized = form.validate(controls)
do_something(deserialized)
return HTTPFound(location='http://example.com/success')
except ValidationFailure as e:
return {'form':e.render()}
else:
return {'form':form.render()} # the form just needs rendering
.. warning::
``form.validate(controls)`` mutates the ``form`` instance, so the
``form`` instance should be constructed (and live) inside one
request.
If ``subcontrol`` is supplied, it represents a named subitem in the
data returned by ``peppercorn.parse(controls)``. Use this subitem as
the pstruct to validate instead of using the entire result of
``peppercorn.parse(controls)`` as the pstruct to validate. For
example, if you've embedded a mapping in the form named ``user``, and
you want to validate only the data contained in that mapping instead
of all of the data in the form post, you might use
``form.validate(controls, subcontrol='user')``.
"""
try:
pstruct = peppercorn.parse(controls)
except ValueError as e:
exc = colander.Invalid(
self.schema, "Invalid peppercorn controls: %s" % e
)
self.widget.handle_error(self, exc)
cstruct = colander.null
raise exception.ValidationFailure(self, cstruct, exc)
if subcontrol is not None:
pstruct = pstruct.get(subcontrol, colander.null)
return self.validate_pstruct(pstruct)
def validate_pstruct(self, pstruct):
"""
Validate the pstruct passed. Works exactly like the
:class:`deform.field.validate` method, except it accepts a pstruct
instead of a set of form controls. A usage example follows::
if 'submit' in request.POST: # the form submission needs validation
controls = request.POST.items()
pstruct = peppercorn.parse(controls)
substruct = pstruct['submapping']
try:
deserialized = form.validate_pstruct(substruct)
do_something(deserialized)
return HTTPFound(location='http://example.com/success')
except ValidationFailure as e:
return {'form':e.render()}
else:
return {'form':form.render()} # the form just needs rendering
"""
exc = None
try:
cstruct = self.deserialize(pstruct)
except colander.Invalid as e:
# fill in errors raised by widgets
self.widget.handle_error(self, e)
cstruct = e.value
exc = e
self.cstruct = cstruct
try:
appstruct = self.schema.deserialize(cstruct)
except colander.Invalid as e:
# fill in errors raised by schema nodes
self.widget.handle_error(self, e)
exc = e
if exc:
raise exception.ValidationFailure(self, cstruct, exc)
return appstruct
def _get_cstruct(self):
return self._cstruct
def _set_cstruct(self, cstruct):
self._cstruct = cstruct
child_cstructs = self.schema.cstruct_children(cstruct)
if not isinstance(child_cstructs, colander.SequenceItems):
# If the schema's type returns SequenceItems, it means that the
# node is a sequence node, which means it has one child
# representing its prototype instead of a set of "real" children;
# our widget handles cloning the prototype node. The prototype's
# cstruct will already be set up with its default value by virtue
# of set_appstruct having been called in its constructor, and we
# needn't (and can't) do anything more.
for n, child in enumerate(self.children):
child.cstruct = child_cstructs[n]
def _del_cstruct(self):
if "_cstruct" in self.__dict__:
# rely on class-scope _cstruct (null)
del self._cstruct
cstruct = property(_get_cstruct, _set_cstruct, _del_cstruct)
def __repr__(self):
return "<%s.%s object at %d (schemanode %r)>" % (
self.__module__,
self.__class__.__name__,
id(self),
self.schema.name,
)
def set_appstruct(self, appstruct):
"""Set the cstruct of this node (and its child nodes) using
``appstruct`` as input."""
cstruct = self.schema.serialize(appstruct)
self.cstruct = cstruct
return cstruct
def set_pstruct(self, pstruct):
"""Set the cstruct of this node (and its child nodes) using
``pstruct`` as input."""
try:
cstruct = self.deserialize(pstruct)
except colander.Invalid as e:
# explicitly don't set errors
cstruct = e.value
self.cstruct = cstruct
def render_template(self, template, **kw):
"""Render the template named ``template`` using ``kw`` as the
top-level keyword arguments (augmented with ``field`` and ``cstruct``
if necessary)"""
values = {"field": self, "cstruct": self.cstruct}
values.update(kw) # allow caller to override field and cstruct
return self.renderer(template, **values)
# retail API
def start_mapping(self, name=None):
"""Create a start-mapping tag (a literal). If ``name`` is ``None``,
the name of this node will be used to generate the name in the tag.
See the :term:`Peppercorn` documentation for more information.
"""
if name is None:
name = self.name
tag = '<input type="hidden" name="__start__" value="%s:mapping"/>'
return Markup(tag % (name,))
def end_mapping(self, name=None):
"""Create an end-mapping tag (a literal). If ``name`` is ``None``,
the name of this node will be used to generate the name in the tag.
See the :term:`Peppercorn` documentation for more information.
"""
if name is None:
name = self.name
tag = '<input type="hidden" name="__end__" value="%s:mapping"/>'
return Markup(tag % (name,))
def start_sequence(self, name=None):
"""Create a start-sequence tag (a literal). If ``name`` is ``None``,
the name of this node will be used to generate the name in the tag.
See the :term:`Peppercorn` documentation for more information.
"""
if name is None:
name = self.name
tag = '<input type="hidden" name="__start__" value="%s:sequence"/>'
return Markup(tag % (name,))
def end_sequence(self, name=None):
"""Create an end-sequence tag (a literal). If ``name`` is ``None``,
the name of this node will be used to generate the name in the tag.
See the :term:`Peppercorn` documentation for more information.
"""
if name is None:
name = self.name
tag = '<input type="hidden" name="__end__" value="%s:sequence"/>'
return Markup(tag % (name,))
def start_rename(self, name=None):
"""Create a start-rename tag (a literal). If ``name`` is ``None``,
the name of this node will be used to generate the name in the tag.
See the :term:`Peppercorn` documentation for more information.
"""
if name is None:
name = self.name
tag = '<input type="hidden" name="__start__" value="%s:rename"/>'
return Markup(tag % (name,))
def end_rename(self, name=None):
"""Create a start-rename tag (a literal). If ``name`` is ``None``,
the name of this node will be used to generate the name in the tag.
See the :term:`Peppercorn` documentation for more information.
"""
if name is None:
name = self.name
tag = '<input type="hidden" name="__end__" value="%s:rename"/>'
return Markup(tag % (name,))
|
(schema, renderer=None, counter=None, resource_registry=None, appstruct=<colander.null>, parent=None, autofocus=None, **kw)
|
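To ground ``get_widget_resources`` from the class above: a sketch that collects a form's JS/CSS requirements into tags. ``RichTextWidget`` is a deform widget that carries resource requirements; the ``/static/`` prefix and the schema are application assumptions.

```python
import colander
from deform import Form
from deform.widget import RichTextWidget


class PageSchema(colander.MappingSchema):
    body = colander.SchemaNode(colander.String(), widget=RichTextWidget())


form = Form(PageSchema(), buttons=('save',))
resources = form.get_widget_resources()  # {'js': [...], 'css': [...]}

# Each entry is a relative resource name; serving these files is the
# application's job (see the resource registry discussion above).
js_tags = ['<script src="/static/%s"></script>' % path
           for path in resources['js']]
css_tags = ['<link rel="stylesheet" href="/static/%s">' % path
            for path in resources['css']]
```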
723,318
|
deform.field
|
__init__
| null |
def __init__(
    self,
    schema,
    renderer=None,
    counter=None,
    resource_registry=None,
    appstruct=colander.null,
    parent=None,
    autofocus=None,
    **kw
):
    self.counter = counter or itertools.count()
    self.order = next(self.counter)
    self.oid = getattr(schema, "oid", "deformField%s" % self.order)
    self.schema = schema
    self.typ = schema.typ  # required by Invalid exception
    self.name = schema.name
    self.title = schema.title
    self.description = schema.description
    self.required = schema.required
    if renderer is None:
        renderer = self.default_renderer
    if resource_registry is None:
        resource_registry = self.default_resource_registry
    self.renderer = renderer
    # Parameters passed from parent field to child
    if "focus" in kw:
        focus = kw["focus"]
    else:
        focus = "on"
    if "have_first_input" in kw:
        self.have_first_input = kw["have_first_input"]
    else:
        self.have_first_input = False
    if (
        focus == "off"
        or autofocus is None
        or autofocus is False
        or str(autofocus).lower() == "off"
    ):
        self.autofocus = None
    else:
        self.autofocus = "autofocus"
    self.resource_registry = resource_registry
    self.children = []
    if parent is not None:
        parent = weakref.ref(parent)
    self._parent = parent
    self.__dict__.update(kw)
    first_input_index = -1
    child_count = 0
    focused = False
    for child in schema.children:
        if (
            focus == "on"
            and not focused
            and type(child.typ) in Field.focusable_input_types
            and type(child.widget) != Field.hidden_type
            and not self.have_first_input
        ):
            first_input_index = child_count
            self.found_first()  # Notify ancestors
        autofocus = getattr(child, "autofocus", None)
        if autofocus is not None:
            focused = True
        kw["have_first_input"] = self.have_first_input
        self.children.append(
            Field(
                child,
                renderer=renderer,
                counter=self.counter,
                resource_registry=resource_registry,
                parent=self,
                autofocus=autofocus,
                **kw
            )
        )
        child_count += 1
    if (
        focus == "on"
        and not focused
        and first_input_index != -1
        and self.have_first_input
    ):
        # User did not set autofocus. Focus on first valid input.
        self.children[first_input_index].autofocus = "autofocus"
    self.set_appstruct(appstruct)
|
(self, schema, renderer=None, counter=None, resource_registry=None, appstruct=<colander.null>, parent=None, autofocus=None, **kw)
|
723,346
|
deform.schema
|
FileData
|
A type representing file data; used to shuttle data back and forth
between an application and the
:class:`deform.widget.FileUploadWidget` widget.
This type passes the value obtained during deserialization back to
the caller unchanged (it will be an instance of
``deform.widget.filedict``, which is a plain dictionary subclass;
it is only a dict subclass so ``isinstance`` may be used against
it in highly generalized persistence circumstances to detect that
it is file data). It serializes from a dictionary containing
partial file data info into a dictionary containing full file data
info, serializing the full file data (the widget receives the full
file data).
|
class FileData(object):
    """
    A type representing file data; used to shuttle data back and forth
    between an application and the
    :class:`deform.widget.FileUploadWidget` widget.
    This type passes the value obtained during deserialization back to
    the caller unchanged (it will be an instance of
    ``deform.widget.filedict``, which is a plain dictionary subclass;
    it is only a dict subclass so ``isinstance`` may be used against
    it in highly generalized persistence circumstances to detect that
    it is file data). It serializes from a dictionary containing
    partial file data info into a dictionary containing full file data
    info, serializing the full file data (the widget receives the full
    file data).
    """

    # We can't use FileUploadWidget as the default_widget_maker for
    # this schema node because it requires a tmpstore argument, and
    # a tmpstore cannot be generally defaulted.

    def serialize(self, node, value):
        """
        Serialize a dictionary representing partial file information
        to a dictionary containing information expected by a file
        upload widget.

        The file data dictionary passed as ``value`` to this
        ``serialize`` method *must* include:

        filename
            Filename of this file (not a full filesystem path, just the
            filename itself).

        uid
            Unique string id for this file. Needs to be unique enough to
            disambiguate it from other files that may use the same
            temporary storage mechanism before a successful validation,
            and must be adequate for the calling code to reidentify it
            after deserialization.

        A fully populated dictionary *may* also include the following
        values:

        fp
            File-like object representing this file's content or
            ``None``. ``None`` indicates this file has already been
            committed to permanent storage. When serializing a
            'committed' file, the ``fp`` value should ideally not be
            passed or it should be passed as ``None``; ``None`` as an
            ``fp`` value is a signifier to the file upload widget that
            the file data has already been committed. Using ``None``
            as an ``fp`` value helps prevent unnecessary data copies
            to temporary storage when a form is rendered, however its
            use requires cooperation from the calling code; in
            particular, the calling code must be willing to translate
            a ``None`` ``fp`` value returned from a deserialization
            into the file data via the ``uid`` in the deserialization.

        mimetype
            File content type (e.g. ``application/octet-stream``).

        size
            File content length (integer).

        preview_url
            URL which provides an image preview of this file's data.

        If a ``size`` is not provided, the widget will have no access
        to size display data. If ``preview_url`` is not provided, the
        widget will not be able to show a file preview. If
        ``mimetype`` is not provided, the widget will not be able to
        display mimetype information.
        """
        if value is colander.null:
            return colander.null
        if not hasattr(value, "get"):
            mapping = {"value": repr(value)}
            raise colander.Invalid(
                node, _("${value} is not a dictionary", mapping=mapping)
            )
        for n in ("filename", "uid"):
            if n not in value:
                mapping = {"value": repr(value), "key": n}
                raise colander.Invalid(
                    node, _("${value} has no ${key} key", mapping=mapping)
                )
        result = widget.filedict(value)
        # provide a value for these entries even if None
        result["mimetype"] = value.get("mimetype")
        result["size"] = value.get("size")
        result["fp"] = value.get("fp")
        result["preview_url"] = value.get("preview_url")
        return result

    def deserialize(self, node, value):
        return value

    def cstruct_children(self, node, cstruct):  # pragma: no cover
        return []
|
()
|
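A minimal sketch of the ``serialize`` contract described above. Wrapping ``FileData`` in a ``colander.SchemaNode`` follows deform's documented pattern; the dictionary values are placeholders.

```python
import colander
from deform.schema import FileData

node = colander.SchemaNode(FileData())
cstruct = node.serialize({
    'filename': 'report.pdf',       # required
    'uid': 'tmp-0001',              # required: temporary-storage key
    'mimetype': 'application/pdf',  # optional
    # 'fp' omitted: serialized as None, i.e. "already committed"
})
# cstruct is a deform.widget.filedict; absent optional keys become None.
assert cstruct['fp'] is None and cstruct['size'] is None
```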
723,347
|
deform.schema
|
cstruct_children
| null |
def cstruct_children(self, node, cstruct):  # pragma: no cover
    return []
|
(self, node, cstruct)
|
723,348
|
deform.schema
|
deserialize
| null |
def deserialize(self, node, value):
    return value
|
(self, node, value)
|
723,349
|
deform.schema
|
serialize
|
Serialize a dictionary representing partial file information
to a dictionary containing information expected by a file
upload widget.
The file data dictionary passed as ``value`` to this
``serialize`` method *must* include:
filename
Filename of this file (not a full filesystem path, just the
filename itself).
uid
Unique string id for this file. Needs to be unique enough to
disambiguate it from other files that may use the same
temporary storage mechanism before a successful validation,
and must be adequate for the calling code to reidentify it
after deserialization.
A fully populated dictionary *may* also include the following
values:
fp
File-like object representing this file's content or
``None``. ``None`` indicates this file has already been
committed to permanent storage. When serializing a
'committed' file, the ``fp`` value should ideally not be
passed or it should be passed as ``None``; ``None`` as an
``fp`` value is a signifier to the file upload widget that
the file data has already been committed. Using ``None``
as an ``fp`` value helps prevent unnecessary data copies
to temporary storage when a form is rendered, however its
use requires cooperation from the calling code; in
particular, the calling code must be willing to translate
a ``None`` ``fp`` value returned from a deserialization
into the file data via the ``uid`` in the deserialization.
mimetype
File content type (e.g. ``application/octet-stream``).
size
File content length (integer).
preview_url
URL which provides an image preview of this file's data.
If a ``size`` is not provided, the widget will have no access
to size display data. If ``preview_url`` is not provided, the
widget will not be able to show a file preview. If
``mimetype`` is not provided, the widget will not be able to
display mimetype information.
|
def serialize(self, node, value):
    """
    Serialize a dictionary representing partial file information
    to a dictionary containing information expected by a file
    upload widget.

    The file data dictionary passed as ``value`` to this
    ``serialize`` method *must* include:

    filename
        Filename of this file (not a full filesystem path, just the
        filename itself).

    uid
        Unique string id for this file. Needs to be unique enough to
        disambiguate it from other files that may use the same
        temporary storage mechanism before a successful validation,
        and must be adequate for the calling code to reidentify it
        after deserialization.

    A fully populated dictionary *may* also include the following
    values:

    fp
        File-like object representing this file's content or
        ``None``. ``None`` indicates this file has already been
        committed to permanent storage. When serializing a
        'committed' file, the ``fp`` value should ideally not be
        passed or it should be passed as ``None``; ``None`` as an
        ``fp`` value is a signifier to the file upload widget that
        the file data has already been committed. Using ``None``
        as an ``fp`` value helps prevent unnecessary data copies
        to temporary storage when a form is rendered, however its
        use requires cooperation from the calling code; in
        particular, the calling code must be willing to translate
        a ``None`` ``fp`` value returned from a deserialization
        into the file data via the ``uid`` in the deserialization.

    mimetype
        File content type (e.g. ``application/octet-stream``).

    size
        File content length (integer).

    preview_url
        URL which provides an image preview of this file's data.

    If a ``size`` is not provided, the widget will have no access
    to size display data. If ``preview_url`` is not provided, the
    widget will not be able to show a file preview. If
    ``mimetype`` is not provided, the widget will not be able to
    display mimetype information.
    """
    if value is colander.null:
        return colander.null
    if not hasattr(value, "get"):
        mapping = {"value": repr(value)}
        raise colander.Invalid(
            node, _("${value} is not a dictionary", mapping=mapping)
        )
    for n in ("filename", "uid"):
        if n not in value:
            mapping = {"value": repr(value), "key": n}
            raise colander.Invalid(
                node, _("${value} has no ${key} key", mapping=mapping)
            )
    result = widget.filedict(value)
    # provide a value for these entries even if None
    result["mimetype"] = value.get("mimetype")
    result["size"] = value.get("size")
    result["fp"] = value.get("fp")
    result["preview_url"] = value.get("preview_url")
    return result
|
(self, node, value)
|
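A minimal usage sketch of the file-data contract above, assuming a ``widget`` of this type and its schema ``node`` are in scope (the filename and uid values are made up):
# Hypothetical file dict for an already-committed file; fp=None signals
# the upload widget that the data is in permanent storage.
committed_file = {
    'filename': 'report.pdf',        # required: bare filename, no path
    'uid': 'a1b2c3d4',               # required: unique id for temp storage
    'fp': None,                      # None => already committed
    'mimetype': 'application/pdf',   # optional
    'size': 48213,                   # optional, in bytes
    'preview_url': None,             # optional, no preview available
}
cstruct = widget.serialize(node, committed_file)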
723,381
|
deform.exception
|
TemplateError
| null |
class TemplateError(Exception):
pass
| null |
723,382
|
deform.exception
|
ValidationFailure
|
The exception raised by :meth:`deform.widget.Widget.validate`
(most often called as ``form.validate(fields)``) when the supplied
field data does not pass the overall constraints of the schema
associated with the widget.
**Attributes**
``field``
The field :meth:`deform.form.Field.validate` was
called on (usually a :class:`deform.form.Form` object).
``cstruct``
The unvalidatable :term:`cstruct` that was returned from
:meth:`deform.widget.Widget.deserialize`.
``error``
The original :class:`colander.Invalid` exception raised by
:meth:`deform.schema.SchemaNode.deserialize` which caused
this exception to need to be raised.
|
class ValidationFailure(Exception):
"""
The exception raised by :meth:`deform.widget.Widget.validate`
(most often called as ``form.validate(fields)``) when the supplied
field data does not pass the overall constraints of the schema
associated with the widget.
**Attributes**
``field``
The field :meth:`deform.form.Field.validate` was
called on (usually a :class:`deform.form.Form` object).
``cstruct``
The unvalidatable :term:`cstruct` that was returned from
:meth:`deform.widget.Widget.deserialize`.
``error``
The original :class:`colander.Invalid` exception raised by
:meth:`deform.schema.SchemaNode.deserialize` which caused
this exception to need to be raised.
"""
def __init__(self, field, cstruct, error):
Exception.__init__(self)
self.field = field
self.cstruct = cstruct
self.error = error
def render(self, **kw):
"""
Used to reserialize the form in such a way that the user will
see error markers in the form HTML.
The ``**kw`` argument allows a caller to pass named arguments
that are passed on to the template.
"""
return self.field.widget.serialize(self.field, self.cstruct, **kw)
|
(field, cstruct, error)
|
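A common handling sketch for this exception; ``form`` and ``pstruct`` (the submitted fields) are assumed to exist in the calling code, and ``render_success`` is a hypothetical success handler:
try:
    # validate() raises ValidationFailure when deserialization fails
    appstruct = form.validate(pstruct)
except ValidationFailure as e:
    html = e.render()  # reserialized form HTML with error markers
else:
    html = render_success(appstruct)  # hypothetical success handler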
723,383
|
deform.exception
|
__init__
| null |
def __init__(self, field, cstruct, error):
Exception.__init__(self)
self.field = field
self.cstruct = cstruct
self.error = error
|
(self, field, cstruct, error)
|
723,384
|
deform.exception
|
render
|
Used to reserialize the form in such a way that the user will
see error markers in the form HTML.
The ``**kw`` argument allows a caller to pass named arguments
that are passed on to the template.
|
def render(self, **kw):
"""
Used to reserialize the form in such a way that the user will
see error markers in the form HTML.
The ``**kw`` argument allows a caller to pass named arguments
that are passed on to the template.
"""
return self.field.widget.serialize(self.field, self.cstruct, **kw)
|
(self, **kw)
|
723,385
|
deform.template
|
ZPTRendererFactory
|
Construct a custom Chameleon ZPT :term:`renderer` for Deform.
If the template name is an asset spec (has a colon in it, e.g.
``mypackage:subdir/subdir2/mytemplate.pt``), use
``pkg_resources.resource_filename`` to resolve it.
Otherwise, fall back to search-path-based machinery to resolve it.
Accepting an asset spec lets users specify templates without the
trouble of adding search paths to the deform rendering machinery.
**Arguments**
search_path
A sequence of strings representing fully qualified filesystem
directories containing Deform Chameleon template sources. The
order in which the directories are listed within ``search_path``
is the order in which they are checked for the template provided
to the renderer. If every resource is an asset spec, however,
the search path is never used.
auto_reload
If true, automatically reload templates when they change (slows
rendering). Default: ``True``.
debug
If true, show nicer tracebacks during Chameleon template rendering
errors (slows rendering). Default: ``False``.
encoding
The encoding that the on-disk representation of the templates
and all non-ASCII values passed to the template should be
expected to adhere to. Default: ``utf-8``.
translator
A translation function used for internationalization when the
``i18n:translate`` attribute syntax is used in the Chameleon
template, or when a
:class:`translationstring.TranslationString` is encountered
during output. It must accept a translation string and return
an interpolated translation. Default: ``None`` (no translation
performed).
|
class ZPTRendererFactory(object):
"""
Construct a custom Chameleon ZPT :term:`renderer` for Deform.
If the template name is an asset spec (has a colon in it, e.g.
``mypackage:subdir/subdir2/mytemplate.pt``), use
``pkg_resources.resource_filename`` to resolve it.
Otherwise, fall back to search-path-based machinery to resolve it.
Accepting an asset spec lets users specify templates without the
trouble of adding search paths to the deform rendering machinery.
**Arguments**
search_path
A sequence of strings representing fully qualified filesystem
directories containing Deform Chameleon template sources. The
order in which the directories are listed within ``search_path``
is the order in which they are checked for the template provided
to the renderer. If every resource is an asset spec, however,
the search path is never used.
auto_reload
If true, automatically reload templates when they change (slows
rendering). Default: ``True``.
debug
If true, show nicer tracebacks during Chameleon template rendering
errors (slows rendering). Default: ``False``.
encoding
The encoding that the on-disk representation of the templates
and all non-ASCII values passed to the template should be
expected to adhere to. Default: ``utf-8``.
translator
A translation function used for internationalization when the
``i18n:translate`` attribute syntax is used in the Chameleon
template, or when a
:class:`translationstring.TranslationString` is encountered
during output. It must accept a translation string and return
an interpolated translation. Default: ``None`` (no translation
performed).
"""
def __init__(
self,
search_path,
auto_reload=True,
debug=False,
encoding="utf-8",
translator=None,
):
self.translate = translator
loader = ZPTTemplateLoader(
search_path=search_path,
auto_reload=auto_reload,
debug=debug,
encoding=encoding,
translate=ChameleonTranslate(translator),
)
self.loader = loader
def __call__(self, template_name, **kw):
return self.load(template_name)(**kw)
def load(self, template_name):
return self.loader.load(template_name)
|
(search_path, auto_reload=True, debug=False, encoding='utf-8', translator=None)
|
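A minimal construction sketch; the search path and template name here are assumptions, not files shipped with deform:
# '/path/to/templates' and 'mytemplate.pt' are hypothetical.
renderer = ZPTRendererFactory(
    search_path=('/path/to/templates',),
    auto_reload=False,  # skip template mtime checks for speed in production
    debug=False,
)
html = renderer('mytemplate.pt', title='Hello')  # **kw is passed to the template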
723,386
|
deform.template
|
__call__
| null |
def __call__(self, template_name, **kw):
return self.load(template_name)(**kw)
|
(self, template_name, **kw)
|
723,387
|
deform.template
|
__init__
| null |
def __init__(
self,
search_path,
auto_reload=True,
debug=False,
encoding="utf-8",
translator=None,
):
self.translate = translator
loader = ZPTTemplateLoader(
search_path=search_path,
auto_reload=auto_reload,
debug=debug,
encoding=encoding,
translate=ChameleonTranslate(translator),
)
self.loader = loader
|
(self, search_path, auto_reload=True, debug=False, encoding='utf-8', translator=None)
|
723,388
|
deform.template
|
load
| null |
def load(self, template_name):
return self.loader.load(template_name)
|
(self, template_name)
|
723,399
|
pyarrowfs_adlgen2.core
|
AccountHandler
|
Handler for a single azure storage account.
Use this to access an Azure Storage account with hierarchical namespace enabled.
|
class AccountHandler(pyarrow.fs.FileSystemHandler):
"""Handler for a single azure storage account.
Use this to access an Azure Storage account with hierarchical namespace enabled.
"""
def __init__(
self,
datalake_service: azure.storage.filedatalake.DataLakeServiceClient,
timeouts=DEFAULT_TIMEOUTS,
fs_handler_cls=FilesystemHandler
):
"""
:param datalake_service: data lake account service
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:param fs_handler_cls: How to create FilesystemHandlers for interacting with
individual file systems in this account
:type datalake_service: azure.storage.filedatalake.DataLakeServiceClient
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/azure.storage.filedatalake.html#azure.storage.filedatalake.DataLakeServiceClient
"""
super().__init__()
self.datalake_service = datalake_service
self.file_system_handlers = {}
self.timeouts = timeouts
self.fs_handler_cls = fs_handler_cls
@classmethod
def from_account_name(
cls,
account_name,
credential=None,
timeouts=DEFAULT_TIMEOUTS,
fs_handler_cls=FilesystemHandler
):
"""
Create from storage account name and credential
:param account_name:
:param credential: Any valid value to pass as credential to
azure.storage.filedatalake.DataLakeServiceClient
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:param fs_handler_cls: How to create FilesystemHandlers for interacting with
individual file systems in this account
:type credential: str for SAS tokens, None for public access, any credential
from azure.identity
:return: pyarrow.fs.FileSystemHandler"""
datalake_service = azure.storage.filedatalake.DataLakeServiceClient(
f'https://{account_name}.dfs.core.windows.net',
credential
)
return cls(datalake_service, timeouts, fs_handler_cls=fs_handler_cls)
def __eq__(self, other):
if isinstance(other, AccountHandler):
return (
self.datalake_service == other.datalake_service
and self.timeouts == other.timeouts)
return NotImplemented
def __ne__(self, other):
if isinstance(other, AccountHandler):
return (
self.datalake_service != other.datalake_service
or self.timeouts != other.timeouts)
return NotImplemented
def get_type_name(self):
# azure blob file system
return f'abfs+{self.datalake_service.account_name}'
def normalize_path(self, path):
return path.lstrip('/').rstrip('/')
def _split_path(self, path):
path = self.normalize_path(path)
if '/' not in path:
return path, ''
fs_name, *rest = path.split('/')
path = '/'.join(rest)
if path.endswith('/'):
raise ValueError(f'{path} is an illegal path (may not end with /)')
return fs_name, path
def _fs(self, fs_name):
if fs_name in self.file_system_handlers:
return self.file_system_handlers[fs_name]
else:
new_fs_handler = self.fs_handler_cls(
self.datalake_service.get_file_system_client(fs_name),
prefix_fs=True,
timeouts=self.timeouts
)
return self.file_system_handlers.setdefault(fs_name, new_fs_handler)
def _get_file_info(self, path):
fs_name, path = self._split_path(path)
if not fs_name:
return pyarrow.fs.FileInfo(
'',
pyarrow.fs.FileType.Directory
)
return self._fs(fs_name)._get_file_info(path)
def get_file_info(self, paths):
return [self._get_file_info(path) for path in paths]
@document_timeout(
azure.storage.filedatalake.DataLakeServiceClient.list_file_systems,
"datalake_service_timeout")
def list_file_systems(self):
return self.datalake_service.list_file_systems(
timeout=self.timeouts.datalake_service_timeout)
@document_timeout(
azure.storage.filedatalake.DataLakeServiceClient.create_file_system,
"datalake_service_timeout")
def create_file_system(self, name):
return self.datalake_service.create_file_system(
name, timeout=self.timeouts.datalake_service_timeout
)
@document_timeout(
azure.storage.filedatalake.DataLakeServiceClient.delete_file_system,
"datalake_service_timeout")
def delete_file_system(self, name):
return self.datalake_service.delete_file_system(
name, timeout=self.timeouts.datalake_service_timeout
)
def get_file_info_selector(self, selector: pyarrow.fs.FileSelector):
fs_name, path = self._split_path(selector.base_dir)
if not fs_name:
file_system_data = [
pyarrow.fs.FileInfo(fs.name, pyarrow.fs.FileType.Directory, mtime=fs.last_modified)
for fs in self.list_file_systems()
]
if selector.recursive:
for fs in self.list_file_systems():
file_system_data.extend(self._fs(fs.name).get_file_info_selector(selector))
return file_system_data
else:
sub_selector = pyarrow.fs.FileSelector(
path, allow_not_found=selector.allow_not_found, recursive=selector.recursive
)
return self._fs(fs_name).get_file_info_selector(sub_selector)
def create_dir(self, path, recursive):
fs_name, path = self._split_path(path)
# This `create_dir` requires us to create a container if it does not exist
if recursive or not path:
try:
self.create_file_system(fs_name)
except azure.core.exceptions.ResourceExistsError:
pass
except azure.core.exceptions.HttpResponseError as e:
if 'AuthorizationFailure' in e.message:
# We don't have permission to create the file system, but it might still exist.
pass
else:
raise FileNotFoundError(fs_name) from e
if path:
try:
self._fs(fs_name).create_dir(path, recursive)
except azure.core.exceptions.ResourceNotFoundError as e:
raise FileNotFoundError(fs_name, path) from e
def delete_dir(self, path):
fs_name, path = self._split_path(path)
if not path:
self.delete_file_system(fs_name)
else:
self._fs(fs_name).delete_dir(path)
def delete_dir_contents(self, path, accept_root_dir=False):
fs_name, path = self._split_path(path)
if not fs_name:
if accept_root_dir:
for fs in self.list_file_systems():
self.delete_file_system(fs.name)
else:
raise ValueError('Attempt to remove root dir with accept_root_dir=False')
else:
# In _our_ context, root dir can not be within the child file system
self._fs(fs_name).delete_dir_contents(path, accept_root_dir=True)
def delete_root_dir_contents(self):
self.delete_dir_contents("")
def delete_file(self, path):
fs_name, path = self._split_path(path)
if not fs_name:
raise FileNotFoundError()
elif not path:
raise IsADirectoryError(fs_name)
else:
if fs_name not in [fs.name for fs in self.list_file_systems()]:
raise FileNotFoundError(fs_name)
self._fs(fs_name).delete_file(path)
def move(self, src, dest):
src_fs, src_path = self._split_path(src)
dst_fs, dst_path = self._split_path(dest)
if not src_path:
raise ValueError(f'Unsupported operation: moving fs {src_fs}')
if not dst_path:
raise ValueError(f'Unsupported operation: new name is file system {dst_fs}')
# Assume source exists, let caller deal with error
fi = self._fs(src_fs).get_file_info([src_path])[0]
try:
dest_fi = self.get_file_info([dest])[0]
if dest_fi.type == pyarrow.fs.FileType.Directory:
# Allow only if it is empty
selector = pyarrow.fs.FileSelector(dest, recursive=False)
if self.get_file_info_selector(selector):
raise ValueError(f'{dest} is a non-empty directory')
if fi.type != dest_fi.type:
raise ValueError(f'src {src} is {fi.type}, but dest {dest} is {dest_fi.type}')
except FileNotFoundError:
pass
if fi.is_file:
self._fs(src_fs).rename_file(src_path, dest)
else:
self._fs(src_fs).rename_directory(src_path, dest)
def copy_file(self, src, dest):
try:
dest_fi = self._get_file_info(dest)
if dest_fi.type == pyarrow.fs.FileType.Directory:
raise IsADirectoryError(dest)
except FileNotFoundError:
pass
with self.open_input_stream(src) as read_from:
with self.open_output_stream(dest) as write_to:
write_to.write(read_from.read())
def _require_path(self, path):
if not path:
raise ValueError('Files can not exist on the root account level')
def open_input_stream(self, path):
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_input_stream(path)
def open_input_file(self, path):
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_input_file(path)
def open_output_stream(self, path, metadata=None):
"""Return an open output stream
Overwrite contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_output_stream(path, metadata=metadata)
def open_append_stream(self, path, metadata=None):
"""Return an open output stream
Append to contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_append_stream(path, metadata=metadata)
def to_fs(self):
return pyarrow.fs.PyFileSystem(self)
|
(datalake_service: azure.storage.filedatalake._data_lake_service_client.DataLakeServiceClient, timeouts=Timeouts(file_client_timeout=None, file_system_timeout=None, datalake_service_timeout=None, directory_client_timeout=None), fs_handler_cls=<class 'pyarrowfs_adlgen2.core.FilesystemHandler'>)
|
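A wiring sketch for this handler, assuming a storage account 'myaccount' with a file system 'data' and a credential from azure.identity (all names hypothetical):
import azure.identity
import pyarrow.dataset

handler = AccountHandler.from_account_name(
    'myaccount', credential=azure.identity.DefaultAzureCredential()
)
fs = handler.to_fs()  # pyarrow.fs.PyFileSystem wrapping this handler
# At the account level, paths are '<file system>/<path within it>'.
ds = pyarrow.dataset.dataset('data/table.parquet', filesystem=fs, format='parquet')
table = ds.to_table()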
723,400
|
pyarrowfs_adlgen2.core
|
__eq__
| null |
def __eq__(self, other):
if isinstance(other, AccountHandler):
return (
self.datalake_service == other.datalake_service
and self.timeouts == other.timeouts)
return NotImplemented
|
(self, other)
|
723,401
|
pyarrowfs_adlgen2.core
|
__init__
|
:param datalake_service: data lake account service
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:param fs_handler_cls: How to create FilesystemHandlers for interacting with
individual file systems in this account
:type datalake_service: azure.storage.filedatalake.DataLakeServiceClient
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/azure.storage.filedatalake.html#azure.storage.filedatalake.DataLakeServiceClient
|
def __init__(
self,
datalake_service: azure.storage.filedatalake.DataLakeServiceClient,
timeouts=DEFAULT_TIMEOUTS,
fs_handler_cls=FilesystemHandler
):
"""
:param datalake_service: data lake account service
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:param fs_handler_cls: How to create FilesystemHandlers for interacting with
individual file systems in this account
:type datalake_service: azure.storage.filedatalake.DataLakeServiceClient
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/azure.storage.filedatalake.html#azure.storage.filedatalake.DataLakeServiceClient
"""
super().__init__()
self.datalake_service = datalake_service
self.file_system_handlers = {}
self.timeouts = timeouts
self.fs_handler_cls = fs_handler_cls
|
(self, datalake_service: azure.storage.filedatalake._data_lake_service_client.DataLakeServiceClient, timeouts=Timeouts(file_client_timeout=None, file_system_timeout=None, datalake_service_timeout=None, directory_client_timeout=None), fs_handler_cls=<class 'pyarrowfs_adlgen2.core.FilesystemHandler'>)
|
723,402
|
pyarrowfs_adlgen2.core
|
__ne__
| null |
def __ne__(self, other):
if isinstance(other, AccountHandler):
return (
self.datalake_service != other.datalake_service
or self.timeouts != other.timeouts)
return NotImplemented
|
(self, other)
|
723,403
|
pyarrowfs_adlgen2.core
|
_fs
| null |
def _fs(self, fs_name):
if fs_name in self.file_system_handlers:
return self.file_system_handlers[fs_name]
else:
new_fs_handler = self.fs_handler_cls(
self.datalake_service.get_file_system_client(fs_name),
prefix_fs=True,
timeouts=self.timeouts
)
return self.file_system_handlers.setdefault(fs_name, new_fs_handler)
|
(self, fs_name)
|
723,404
|
pyarrowfs_adlgen2.core
|
_get_file_info
| null |
def _get_file_info(self, path):
fs_name, path = self._split_path(path)
if not fs_name:
return pyarrow.fs.FileInfo(
'',
pyarrow.fs.FileType.Directory
)
return self._fs(fs_name)._get_file_info(path)
|
(self, path)
|
723,405
|
pyarrowfs_adlgen2.core
|
_require_path
| null |
def _require_path(self, path):
if not path:
raise ValueError('Files can not exist on the root account level')
|
(self, path)
|
723,406
|
pyarrowfs_adlgen2.core
|
_split_path
| null |
def _split_path(self, path):
path = self.normalize_path(path)
if '/' not in path:
return path, ''
fs_name, *rest = path.split('/')
path = '/'.join(rest)
if path.endswith('/'):
raise ValueError(f'{path} is an illegal path (may not end with /)')
return fs_name, path
|
(self, path)
|
723,407
|
pyarrowfs_adlgen2.core
|
copy_file
| null |
def copy_file(self, src, dest):
try:
dest_fi = self._get_file_info(dest)
if dest_fi.type == pyarrow.fs.FileType.Directory:
raise IsADirectoryError(dest)
except FileNotFoundError:
pass
with self.open_input_stream(src) as read_from:
with self.open_output_stream(dest) as write_to:
write_to.write(read_from.read())
|
(self, src, dest)
|
723,408
|
pyarrowfs_adlgen2.core
|
create_dir
| null |
def create_dir(self, path, recursive):
fs_name, path = self._split_path(path)
# This `create_dir` requires us to create a container if it does not exist
if recursive or not path:
try:
self.create_file_system(fs_name)
except azure.core.exceptions.ResourceExistsError:
pass
except azure.core.exceptions.HttpResponseError as e:
if 'AuthorizationFailure' in e.message:
# We don't have permission to create the file system, but it might still exist.
pass
else:
raise FileNotFoundError(fs_name) from e
if path:
try:
self._fs(fs_name).create_dir(path, recursive)
except azure.core.exceptions.ResourceNotFoundError as e:
raise FileNotFoundError(fs_name, path) from e
|
(self, path, recursive)
|
723,409
|
pyarrowfs_adlgen2.core
|
create_file_system
|
Return result of azure.storage.filedatalake.DataLakeServiceClient.create_file_system
Affected by self.timeouts.datalake_service_timeout
|
@document_timeout(
azure.storage.filedatalake.DataLakeServiceClient.create_file_system,
"datalake_service_timeout")
def create_file_system(self, name):
return self.datalake_service.create_file_system(
name, timeout=self.timeouts.datalake_service_timeout
)
|
(self, name)
|
723,410
|
pyarrowfs_adlgen2.core
|
delete_dir
| null |
def delete_dir(self, path):
fs_name, path = self._split_path(path)
if not path:
self.delete_file_system(fs_name)
else:
self._fs(fs_name).delete_dir(path)
|
(self, path)
|
723,411
|
pyarrowfs_adlgen2.core
|
delete_dir_contents
| null |
def delete_dir_contents(self, path, accept_root_dir=False):
fs_name, path = self._split_path(path)
if not fs_name:
if accept_root_dir:
for fs in self.list_file_systems():
self.delete_file_system(fs.name)
else:
raise ValueError('Attempt to remove root dir with accept_root_dir=False')
else:
# In _our_ context, root dir can not be within the child file system
self._fs(fs_name).delete_dir_contents(path, accept_root_dir=True)
|
(self, path, accept_root_dir=False)
|
723,412
|
pyarrowfs_adlgen2.core
|
delete_file
| null |
def delete_file(self, path):
fs_name, path = self._split_path(path)
if not fs_name:
raise FileNotFoundError()
elif not path:
raise IsADirectoryError(fs_name)
else:
if fs_name not in [fs.name for fs in self.list_file_systems()]:
raise FileNotFoundError(fs_name)
self._fs(fs_name).delete_file(path)
|
(self, path)
|
723,413
|
pyarrowfs_adlgen2.core
|
delete_file_system
|
Return result of azure.storage.filedatalake.DataLakeServiceClient.delete_file_system
Affected by self.timeouts.datalake_service_timeout
|
@document_timeout(
azure.storage.filedatalake.DataLakeServiceClient.delete_file_system,
"datalake_service_timeout")
def delete_file_system(self, name):
return self.datalake_service.delete_file_system(
name, timeout=self.timeouts.datalake_service_timeout
)
|
(self, name)
|
723,414
|
pyarrowfs_adlgen2.core
|
delete_root_dir_contents
| null |
def delete_root_dir_contents(self):
self.delete_dir_contents("")
|
(self)
|
723,415
|
pyarrowfs_adlgen2.core
|
get_file_info
| null |
def get_file_info(self, paths):
return [self._get_file_info(path) for path in paths]
|
(self, paths)
|
723,416
|
pyarrowfs_adlgen2.core
|
get_file_info_selector
| null |
def get_file_info_selector(self, selector: pyarrow.fs.FileSelector):
fs_name, path = self._split_path(selector.base_dir)
if not fs_name:
file_system_data = [
pyarrow.fs.FileInfo(fs.name, pyarrow.fs.FileType.Directory, mtime=fs.last_modified)
for fs in self.list_file_systems()
]
if selector.recursive:
for fs in self.list_file_systems():
file_system_data.extend(self._fs(fs.name).get_file_info_selector(selector))
return file_system_data
else:
sub_selector = pyarrow.fs.FileSelector(
path, allow_not_found=selector.allow_not_found, recursive=selector.recursive
)
return self._fs(fs_name).get_file_info_selector(sub_selector)
|
(self, selector: pyarrow._fs.FileSelector)
|
723,417
|
pyarrowfs_adlgen2.core
|
get_type_name
| null |
def get_type_name(self):
# azure blob file system
return f'abfs+{self.datalake_service.account_name}'
|
(self)
|
723,418
|
pyarrowfs_adlgen2.core
|
list_file_systems
|
Return result of azure.storage.filedatalake.DataLakeServiceClient.list_file_systems
Affected by self.timeouts.datalake_service_timeout
|
@document_timeout(
azure.storage.filedatalake.DataLakeServiceClient.list_file_systems,
"datalake_service_timeout")
def list_file_systems(self):
return self.datalake_service.list_file_systems(
timeout=self.timeouts.datalake_service_timeout)
|
(self)
|
723,419
|
pyarrowfs_adlgen2.core
|
move
| null |
def move(self, src, dest):
src_fs, src_path = self._split_path(src)
dst_fs, dst_path = self._split_path(dest)
if not src_path:
raise ValueError(f'Unsupported operation: moving fs {src_fs}')
if not dst_path:
raise ValueError(f'Unsupported operation: new name is file system {dst_fs}')
# Assume source exists, let caller deal with error
fi = self._fs(src_fs).get_file_info([src_path])[0]
try:
dest_fi = self.get_file_info([dest])[0]
if dest_fi.type == pyarrow.fs.FileType.Directory:
# Allow only if it is empty
selector = pyarrow.fs.FileSelector(dest, recursive=False)
if self.get_file_info_selector(selector):
raise ValueError(f'{dest} is a non-empty directory')
if fi.type != dest_fi.type:
raise ValueError(f'src {src} is {fi.type}, but dest {dest} is {dest_fi.type}')
except FileNotFoundError:
pass
if fi.is_file:
self._fs(src_fs).rename_file(src_path, dest)
else:
self._fs(src_fs).rename_directory(src_path, dest)
|
(self, src, dest)
|
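A sketch of the move semantics enforced above: both endpoints must name a file system plus a path inside it, and renaming a bare file system is rejected (names are hypothetical):
# Assuming `handler` is an AccountHandler and 'data' is an existing file system.
handler.move('data/raw/a.csv', 'data/archive/a.csv')  # file rename within 'data'
try:
    handler.move('data', 'backup')  # bare file systems on both sides
except ValueError as e:
    print(e)  # 'Unsupported operation: moving fs data'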
723,420
|
pyarrowfs_adlgen2.core
|
normalize_path
| null |
def normalize_path(self, path):
return path.lstrip('/').rstrip('/')
|
(self, path)
|
723,421
|
pyarrowfs_adlgen2.core
|
open_append_stream
|
Return an open output stream
Append to contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
|
def open_append_stream(self, path, metadata=None):
"""Return an open output stream
Append to contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_append_stream(path, metadata=metadata)
|
(self, path, metadata=None)
|
723,422
|
pyarrowfs_adlgen2.core
|
open_input_file
| null |
def open_input_file(self, path):
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_input_file(path)
|
(self, path)
|
723,423
|
pyarrowfs_adlgen2.core
|
open_input_stream
| null |
def open_input_stream(self, path):
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_input_stream(path)
|
(self, path)
|
723,424
|
pyarrowfs_adlgen2.core
|
open_output_stream
|
Return an open output stream
Overwrite contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
|
def open_output_stream(self, path, metadata=None):
"""Return an open output stream
Overwrite contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
fs_name, path = self._split_path(path)
self._require_path(path)
return self._fs(fs_name).open_output_stream(path, metadata=metadata)
|
(self, path, metadata=None)
|
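A write sketch through the account-level handler; the path is illustrative, and the metadata keys must be valid azure.storage.filedatalake.ContentSettings kwargs as noted above:
# Assuming `handler` is an AccountHandler and the 'data' file system exists.
with handler.open_output_stream(
    'data/reports/summary.csv',
    metadata={'content_type': 'text/csv'},  # a ContentSettings kwarg
) as out:
    out.write(b'id,total\n1,42\n')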
723,425
|
pyarrowfs_adlgen2.core
|
to_fs
| null |
def to_fs(self):
return pyarrow.fs.PyFileSystem(self)
|
(self)
|
723,426
|
pyarrowfs_adlgen2.core
|
FilesystemHandler
|
Handler for a single file system within an azure storage account.
Use this if you do not have access to the account itself, e.g. if you have a SAS token
that has access only to a single file system.
|
class FilesystemHandler(pyarrow.fs.FileSystemHandler):
"""
Handler for a single file system within an azure storage account.
Use this if you do not have access to the account itself, e.g. if you have a SAS token
that has access only to a single file system.
"""
def __init__(
self,
file_system_client: azure.storage.filedatalake.FileSystemClient,
prefix_fs=False,
timeouts=DEFAULT_TIMEOUTS
):
"""
:param file_system_client:
:param prefix_fs: If True, prefix the name of the file system to all generated paths
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:type file_system_client: azure.storage.filedatalake.FileSystemClient
:type prefix_fs: bool
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/azure.storage.filedatalake.html#azure.storage.filedatalake.FileSystemClient
"""
super().__init__()
self.prefix_fs = prefix_fs
self.file_system_client = file_system_client
self.timeouts = timeouts
def _prefix(self, path):
if self.prefix_fs and path:
return f'{self.file_system_client.file_system_name}/{path}'
elif self.prefix_fs and not path:
return self.file_system_client.file_system_name
else:
return path
@document_timeout(
azure.storage.filedatalake.FileSystemClient.get_paths,
"file_system_timeout")
def get_paths(self, path, recursive=False):
return self.file_system_client.get_paths(
path, recursive=recursive, timeout=self.timeouts.file_system_timeout
)
@document_timeout(
azure.storage.filedatalake.DataLakeFileClient.rename_file,
"file_client_timeout")
def rename_file(self, src_path, dest_path):
return self.file_system_client.get_file_client(src_path).rename_file(
dest_path, timeout=self.timeouts.file_client_timeout
)
@document_timeout(
azure.storage.filedatalake.DataLakeDirectoryClient.rename_directory,
"directory_client_timeout")
def rename_directory(self, src_path, dest_path):
return self.file_system_client.get_directory_client(src_path).rename_directory(
dest_path, timeout=self.timeouts.directory_client_timeout
)
@document_timeout(
azure.storage.filedatalake.FileSystemClient.create_directory,
"file_system_timeout")
def create_directory(self, path):
return self.file_system_client.create_directory(
path, timeout=self.timeouts.file_system_timeout)
@document_timeout(
azure.storage.filedatalake.FileSystemClient.delete_directory,
"file_system_timeout")
def delete_directory(self, path):
return self.file_system_client.delete_directory(
path, timeout=self.timeouts.file_system_timeout
)
@document_timeout(
azure.storage.filedatalake.DataLakeFileClient.delete_file,
"file_client_timeout")
def delete_file_(self, path):
return self.file_system_client.get_file_client(path).delete_file(
timeout=self.timeouts.file_client_timeout
)
@classmethod
def from_account_name(
cls,
account_name,
file_system_name,
credential=None,
timeouts=DEFAULT_TIMEOUTS
):
"""
Create from storage account name, file system name and credential
:param account_name:
:param file_system_name:
:param credential: Any valid value to pass as credential to
azure.storage.filedatalake.FileSystemClient
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:type credential: str for SAS tokens, None for public access,
any credential from azure.identity
:return: FilesystemHandler
"""
client = azure.storage.filedatalake.FileSystemClient(
f'https://{account_name}.dfs.core.windows.net',
file_system_name,
credential=credential
)
return cls(client, timeouts=timeouts)
def __eq__(self, other):
if isinstance(other, FilesystemHandler):
return (
self.file_system_client == other.file_system_client
and self.timeouts == other.timeouts)
return NotImplemented
def __ne__(self, other):
if isinstance(other, FilesystemHandler):
return (
self.file_system_client != other.file_system_client
or self.timeouts != other.timeouts)
return NotImplemented
def get_type_name(self):
# azure blob file system
return f"abfs+{self.file_system_client.account_name}/{self.file_system_client.file_system_name}"
def normalize_path(self, path: str):
return path.lstrip('/').rstrip('/')
def _create_file_info(
self,
path_properties: azure.storage.filedatalake._models.PathProperties
):
if path_properties.is_directory:
path_type = pyarrow.fs.FileType.Directory
else:
path_type = pyarrow.fs.FileType.File
return pyarrow.fs.FileInfo(
self._prefix(path_properties.name),
path_type,
size=path_properties.content_length,
mtime=_parse_azure_ts(path_properties.last_modified)
)
def _verify_is_dir(self, path: str):
if path in {'', '/'}:
# The root always exists
return
try:
parent = os.path.dirname(path)
path_property_result = self.get_paths(parent, recursive=False)
for path_properties in path_property_result:
if path_properties.name == path:
if not path_properties.is_directory:
raise NotADirectoryError(self._prefix(path))
return
raise NotADirectoryError(self._prefix(path))
except azure.core.exceptions.HttpResponseError as e:
if e.status_code == 404:
raise FileNotFoundError(self._prefix(path))
else:
raise
def _get_file_info(self, path):
if not path.lstrip('/'):
return pyarrow.fs.FileInfo(
self.file_system_client.file_system_name if self.prefix_fs else '',
pyarrow.fs.FileType.Directory
)
parent = os.path.dirname(path)
listing = self.get_paths(parent, recursive=False)
for path_properties in listing:
if path_properties.name == path:
return self._create_file_info(path_properties)
raise FileNotFoundError(self._prefix(path))
def get_file_info(self, paths: typing.List[str]):
return [
self._get_file_info(self.normalize_path(path)) for path in paths
]
def get_file_info_selector(self, selector: pyarrow.fs.FileSelector):
try:
self._verify_is_dir(self.normalize_path(selector.base_dir))
except FileNotFoundError:
if selector.allow_not_found:
return []
else:
raise
listing = self.get_paths(
self.normalize_path(selector.base_dir),
recursive=selector.recursive
)
return [
self._create_file_info(path_properties)
for path_properties in listing
]
def create_dir(self, path, recursive):
path = self.normalize_path(path)
if recursive:
self.create_directory(path)
else:
parent = os.path.dirname(path)
self._verify_is_dir(parent)
self.create_directory(path)
def delete_dir(self, path):
path = self.normalize_path(path)
self._verify_is_dir(path)
self.delete_directory(path)
def delete_dir_contents(self, path, accept_root_dir=False):
path = self.normalize_path(path)
self._verify_is_dir(path)
if not accept_root_dir and path in {'', '/'}:
raise ValueError('Attempt to delete root dir with accept_root_dir=False')
for path_properties in self.get_paths(path, recursive=False):
if path_properties.is_directory:
self.delete_directory(path_properties.name)
else:
self.delete_file_(path_properties.name)
def delete_root_dir_contents(self):
self.delete_dir_contents("")
def delete_file(self, path):
path = self.normalize_path(path)
file_info: pyarrow.fs.FileInfo = self.get_file_info([path])[0]
if not file_info.is_file:
raise IsADirectoryError(self._prefix(path))
self.delete_file_(path)
def move(self, src, dest):
# This is a simple rename. Caveat: the dest path is not relative to the file_system;
# the azure-sdk expects the file system name to be prefixed to the new path.
src = self.normalize_path(src)
dest = self.normalize_path(dest)
src_info = self.get_file_info([src])[0]
if src_info.type == pyarrow.fs.FileType.Directory:
self.rename_directory(src, self.file_system_client.file_system_name + '/' + dest)
else:
self.rename_file(src, self.file_system_client.file_system_name + '/' + dest)
def copy_file(self, src, dest):
src = self.normalize_path(src)
dest = self.normalize_path(dest)
try:
info = self.get_file_info([dest])[0]
if info.type == pyarrow.fs.FileType.Directory:
raise IsADirectoryError(self._prefix(dest))
except FileNotFoundError as ignore: # noqa
pass
# There is actually no API call to do this, so it must be implemented with read/write
with self.open_input_stream(src) as source:
with self.open_output_stream(dest) as out:
out.write(source.read())
def open_input_stream(self, path):
path = self.normalize_path(path)
self._verify_is_file(path)
fc = self.file_system_client.get_file_client(path)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='rb', timeouts=self.timeouts))
def open_input_file(self, path):
path = self.normalize_path(path)
self._verify_is_file(path)
fc = self.file_system_client.get_file_client(path)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='rb', timeouts=self.timeouts))
def _set_metadata(self, fc, metadata):
if metadata:
# pyarrow sends bytes, which we can't splat into the init of ContentSettings due to TypeError.
# A simple workaround would be to type the keys manually and do a lookup -- but if we did,
# we would need to update this location if the SDK were ever to add anything (which it might never).
# However, these are HTTP header names in our case, so let's hope they don't contain any characters
# that wouldn't decode to something meaningful with the default encoding.
metadata = {str(key): metadata[key] for key in metadata}
settings = ContentSettings(**metadata)
if fc.exists():
fc.set_http_headers(settings)
else:
fc.create_file(content_settings=settings)
def open_output_stream(self, path, metadata=None):
"""Return an open output stream
Overwrite contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
path = self.normalize_path(path)
fc = self.file_system_client.get_file_client(path)
self._set_metadata(fc, metadata)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='wb', timeouts=self.timeouts))
def open_append_stream(self, path, metadata=None):
"""Return an open output stream
Append to contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
path = self.normalize_path(path)
fc = self.file_system_client.get_file_client(path)
self._set_metadata(fc, metadata)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='ab', timeouts=self.timeouts))
def _verify_is_file(self, path):
info = self.get_file_info([path])[0]
if not info.is_file:
raise FileNotFoundError(self._prefix(path))
def to_fs(self):
return pyarrow.fs.PyFileSystem(self)
|
(file_system_client: azure.storage.filedatalake._file_system_client.FileSystemClient, prefix_fs=False, timeouts=Timeouts(file_client_timeout=None, file_system_timeout=None, datalake_service_timeout=None, directory_client_timeout=None))
|
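A sketch of the single-file-system case this class targets, assuming a SAS token scoped to one container (account, container, and token are placeholders):
sas_token = '?sv=...'  # placeholder, not a real SAS token
handler = FilesystemHandler.from_account_name('myaccount', 'data', credential=sas_token)
fs = handler.to_fs()
# Paths are relative to the file system; no container prefix is needed here.
with fs.open_input_stream('reports/summary.csv') as f:
    print(f.read())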
723,427
|
pyarrowfs_adlgen2.core
|
__eq__
| null |
def __eq__(self, other):
if isinstance(other, FilesystemHandler):
return (
self.file_system_client == other.file_system_client
and self.timeouts == other.timeouts)
return NotImplemented
|
(self, other)
|
723,428
|
pyarrowfs_adlgen2.core
|
__init__
|
:param file_system_client:
:param prefix_fs: If True, prefix the name of the file system to all generated paths
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:type file_system_client: azure.storage.filedatalake.FileSystemClient
:type prefix_fs: bool
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/azure.storage.filedatalake.html#azure.storage.filedatalake.FileSystemClient
|
def __init__(
self,
file_system_client: azure.storage.filedatalake.FileSystemClient,
prefix_fs=False,
timeouts=DEFAULT_TIMEOUTS
):
"""
:param file_system_client:
:param prefix_fs: If True, prefix the name of the file system to all generated paths
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:type file_system_client: azure.storage.filedatalake.FileSystemClient
:type prefix_fs: bool
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/azure.storage.filedatalake.html#azure.storage.filedatalake.FileSystemClient
"""
super().__init__()
self.prefix_fs = prefix_fs
self.file_system_client = file_system_client
self.timeouts = timeouts
|
(self, file_system_client: azure.storage.filedatalake._file_system_client.FileSystemClient, prefix_fs=False, timeouts=Timeouts(file_client_timeout=None, file_system_timeout=None, datalake_service_timeout=None, directory_client_timeout=None))
|
723,429
|
pyarrowfs_adlgen2.core
|
__ne__
| null |
def __ne__(self, other):
if isinstance(other, FilesystemHandler):
return (
self.file_system_client != other.file_system_client
or self.timeouts != other.timeouts)
return NotImplemented
|
(self, other)
|
723,430
|
pyarrowfs_adlgen2.core
|
_create_file_info
| null |
def _create_file_info(
self,
path_properties: azure.storage.filedatalake._models.PathProperties
):
if path_properties.is_directory:
path_type = pyarrow.fs.FileType.Directory
else:
path_type = pyarrow.fs.FileType.File
return pyarrow.fs.FileInfo(
self._prefix(path_properties.name),
path_type,
size=path_properties.content_length,
mtime=_parse_azure_ts(path_properties.last_modified)
)
|
(self, path_properties: azure.storage.filedatalake._models.PathProperties)
|
723,431
|
pyarrowfs_adlgen2.core
|
_get_file_info
| null |
def _get_file_info(self, path):
if not path.lstrip('/'):
return pyarrow.fs.FileInfo(
self.file_system_client.file_system_name if self.prefix_fs else '',
pyarrow.fs.FileType.Directory
)
parent = os.path.dirname(path)
listing = self.get_paths(parent, recursive=False)
for path_properties in listing:
if path_properties.name == path:
return self._create_file_info(path_properties)
raise FileNotFoundError(self._prefix(path))
|
(self, path)
|
723,432
|
pyarrowfs_adlgen2.core
|
_prefix
| null |
def _prefix(self, path):
if self.prefix_fs and path:
return f'{self.file_system_client.file_system_name}/{path}'
elif self.prefix_fs and not path:
return self.file_system_client.file_system_name
else:
return path
|
(self, path)
|
723,433
|
pyarrowfs_adlgen2.core
|
_set_metadata
| null |
def _set_metadata(self, fc, metadata):
if metadata:
# pyarrow sends bytes, which we can't splat into the init of ContentSettings due to TypeError.
# A simple workaround would be to type the keys manually and do a lookup -- but if we did,
# we would need to update this location if the SDK were ever to add anything (which it might never).
# However, these are HTTP header names in our case, so let's hope they don't contain any characters
# that wouldn't decode to something meaningful with the default encoding.
metadata = {str(key): metadata[key] for key in metadata}
settings = ContentSettings(**metadata)
if fc.exists():
fc.set_http_headers(settings)
else:
fc.create_file(content_settings=settings)
|
(self, fc, metadata)
|
723,434
|
pyarrowfs_adlgen2.core
|
_verify_is_dir
| null |
def _verify_is_dir(self, path: str):
if path in {'', '/'}:
# The root always exists
return
try:
parent = os.path.dirname(path)
path_property_result = self.get_paths(parent, recursive=False)
for path_properties in path_property_result:
if path_properties.name == path:
if not path_properties.is_directory:
raise NotADirectoryError(self._prefix(path))
return
raise NotADirectoryError(self._prefix(path))
except azure.core.exceptions.HttpResponseError as e:
if e.status_code == 404:
raise FileNotFoundError(self._prefix(path))
else:
raise
|
(self, path: str)
|
723,435
|
pyarrowfs_adlgen2.core
|
_verify_is_file
| null |
def _verify_is_file(self, path):
info = self.get_file_info([path])[0]
if not info.is_file:
raise FileNotFoundError(self._prefix(path))
|
(self, path)
|
723,436
|
pyarrowfs_adlgen2.core
|
copy_file
| null |
def copy_file(self, src, dest):
src = self.normalize_path(src)
dest = self.normalize_path(dest)
try:
info = self.get_file_info([dest])[0]
if info.type == pyarrow.fs.FileType.Directory:
raise IsADirectoryError(self._prefix(dest))
except FileNotFoundError as ignore: # noqa
pass
# There is actually no API call to do this, so it must be implemented with read/write
with self.open_input_stream(src) as source:
with self.open_output_stream(dest) as out:
out.write(source.read())
|
(self, src, dest)
|
723,437
|
pyarrowfs_adlgen2.core
|
create_dir
| null |
def create_dir(self, path, recursive):
path = self.normalize_path(path)
if recursive:
self.create_directory(path)
else:
parent = os.path.dirname(path)
self._verify_is_dir(parent)
self.create_directory(path)
|
(self, path, recursive)
|
723,438
|
pyarrowfs_adlgen2.core
|
create_directory
|
Return result of azure.storage.filedatalake.FileSystemClient.create_directory
Affected by self.timeouts.file_system_timeout
|
@document_timeout(
azure.storage.filedatalake.FileSystemClient.create_directory,
"file_system_timeout")
def create_directory(self, path):
return self.file_system_client.create_directory(
path, timeout=self.timeouts.file_system_timeout)
|
(self, path)
|
723,439
|
pyarrowfs_adlgen2.core
|
delete_dir
| null |
def delete_dir(self, path):
path = self.normalize_path(path)
self._verify_is_dir(path)
self.delete_directory(path)
|
(self, path)
|
723,440
|
pyarrowfs_adlgen2.core
|
delete_dir_contents
| null |
def delete_dir_contents(self, path, accept_root_dir=False):
path = self.normalize_path(path)
self._verify_is_dir(path)
if not accept_root_dir and path in {'', '/'}:
raise ValueError('Attempt to delete root dir with accept_root_dir=False')
for path_properties in self.get_paths(path, recursive=False):
if path_properties.is_directory:
self.delete_directory(path_properties.name)
else:
self.delete_file_(path_properties.name)
|
(self, path, accept_root_dir=False)
|
723,441
|
pyarrowfs_adlgen2.core
|
delete_directory
|
Return result of azure.storage.filedatalake.FileSystemClient.delete_directory
Affected by self.timeouts.file_system_timeout
|
@document_timeout(
azure.storage.filedatalake.FileSystemClient.delete_directory,
"file_system_timeout")
def delete_directory(self, path):
return self.file_system_client.delete_directory(
path, timeout=self.timeouts.file_system_timeout
)
|
(self, path)
|
723,442
|
pyarrowfs_adlgen2.core
|
delete_file
| null |
def delete_file(self, path):
path = self.normalize_path(path)
file_info: pyarrow.fs.FileInfo = self.get_file_info([path])[0]
if not file_info.is_file:
raise IsADirectoryError(self._prefix(path))
self.delete_file_(path)
|
(self, path)
|
723,443
|
pyarrowfs_adlgen2.core
|
delete_file_
|
Return result of azure.storage.filedatalake.DataLakeFileClient.delete_file
Affected by self.timeouts.file_client_timeout
|
@document_timeout(
azure.storage.filedatalake.DataLakeFileClient.delete_file,
"file_client_timeout")
def delete_file_(self, path):
return self.file_system_client.get_file_client(path).delete_file(
timeout=self.timeouts.file_client_timeout
)
|
(self, path)
|
723,445
|
pyarrowfs_adlgen2.core
|
get_file_info
| null |
def get_file_info(self, paths: typing.List[str]):
return [
self._get_file_info(self.normalize_path(path)) for path in paths
]
|
(self, paths: List[str])
|
723,446
|
pyarrowfs_adlgen2.core
|
get_file_info_selector
| null |
def get_file_info_selector(self, selector: pyarrow.fs.FileSelector):
try:
self._verify_is_dir(self.normalize_path(selector.base_dir))
except FileNotFoundError:
if selector.allow_not_found:
return []
else:
raise
listing = self.get_paths(
self.normalize_path(selector.base_dir),
recursive=selector.recursive
)
return [
self._create_file_info(path_properties)
for path_properties in listing
]
|
(self, selector: pyarrow._fs.FileSelector)
|
723,447
|
pyarrowfs_adlgen2.core
|
get_paths
|
Return result of azure.storage.filedatalake.FileSystemClient.get_paths
Affected by self.timeouts.file_system_timeout
|
@document_timeout(
azure.storage.filedatalake.FileSystemClient.get_paths,
"file_system_timeout")
def get_paths(self, path, recursive=False):
return self.file_system_client.get_paths(
path, recursive=recursive, timeout=self.timeouts.file_system_timeout
)
|
(self, path, recursive=False)
|
723,448
|
pyarrowfs_adlgen2.core
|
get_type_name
| null |
def get_type_name(self):
# azure blob file system
return f"abfs+{self.file_system_client.account_name}/{self.file_system_client.file_system_name}"
|
(self)
|
723,449
|
pyarrowfs_adlgen2.core
|
move
| null |
def move(self, src, dest):
# This is a simple rename. Caveat: the dest path is not relative to the file_system;
# the azure-sdk expects the file system name to be prefixed to the new path.
src = self.normalize_path(src)
dest = self.normalize_path(dest)
src_info = self.get_file_info([src])[0]
if src_info.type == pyarrow.fs.FileType.Directory:
self.rename_directory(src, self.file_system_client.file_system_name + '/' + dest)
else:
self.rename_file(src, self.file_system_client.file_system_name + '/' + dest)
|
(self, src, dest)
|
723,450
|
pyarrowfs_adlgen2.core
|
normalize_path
| null |
def normalize_path(self, path: str):
return path.lstrip('/').rstrip('/')
|
(self, path: str)
|
723,451
|
pyarrowfs_adlgen2.core
|
open_append_stream
|
Return an open output stream
Append to contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
|
def open_append_stream(self, path, metadata=None):
"""Return an open output stream
Append to contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
path = self.normalize_path(path)
fc = self.file_system_client.get_file_client(path)
self._set_metadata(fc, metadata)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='ab', timeouts=self.timeouts))
|
(self, path, metadata=None)
|
723,452
|
pyarrowfs_adlgen2.core
|
open_input_file
| null |
def open_input_file(self, path):
path = self.normalize_path(path)
self._verify_is_file(path)
fc = self.file_system_client.get_file_client(path)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='rb', timeouts=self.timeouts))
|
(self, path)
|
723,453
|
pyarrowfs_adlgen2.core
|
open_input_stream
| null |
def open_input_stream(self, path):
path = self.normalize_path(path)
self._verify_is_file(path)
fc = self.file_system_client.get_file_client(path)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='rb', timeouts=self.timeouts))
|
(self, path)
|
723,454
|
pyarrowfs_adlgen2.core
|
open_output_stream
|
Return an open output stream
Overwrite contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
|
def open_output_stream(self, path, metadata=None):
"""Return an open output stream
Overwrite contents at `path`, if there are any.
Metadata if provided must use keys acceptable to
azure.storage.filedatalake.ContentSettings,
for example: content_type, content_encoding, cache_control
:param path: `str`
:param metadata: `dict`
"""
path = self.normalize_path(path)
fc = self.file_system_client.get_file_client(path)
self._set_metadata(fc, metadata)
return pyarrow.PythonFile(DatalakeGen2File(fc, mode='wb', timeouts=self.timeouts))
|
(self, path, metadata=None)
|
723,455
|
pyarrowfs_adlgen2.core
|
rename_directory
|
Return result of azure.storage.filedatalake.DataLakeDirectoryClient.rename_directory
Affected by self.timeouts.directory_client_timeout
|
@document_timeout(
azure.storage.filedatalake.DataLakeDirectoryClient.rename_directory,
"directory_client_timeout")
def rename_directory(self, src_path, dest_path):
return self.file_system_client.get_directory_client(src_path).rename_directory(
dest_path, timeout=self.timeouts.directory_client_timeout
)
|
(self, src_path, dest_path)
|
723,456
|
pyarrowfs_adlgen2.core
|
rename_file
|
Return result of azure.storage.filedatalake.DataLakeFileClient.rename_file
Affected by self.timeouts.file_client_timeout
|
@document_timeout(
azure.storage.filedatalake.DataLakeFileClient.rename_file,
"file_client_timeout")
def rename_file(self, src_path, dest_path):
return self.file_system_client.get_file_client(src_path).rename_file(
dest_path, timeout=self.timeouts.file_client_timeout
)
|
(self, src_path, dest_path)
|
723,458
|
pyarrowfs_adlgen2.core
|
Timeouts
|
Timeouts passed to azure.storage.filedatalake operations
Each value is provided as the timeout kwarg to the
corresponding object in azure.storage.filedatalake. Timeout
units are in seconds.
|
class Timeouts:
"""Timeouts passed to azure.storage.filedatalake operations
Each value is provided as the timeout kwarg to the
corresponding object in azure.storage.filedatalake. Timeout
units are in seconds."""
file_client_timeout: typing.Optional[int]
file_system_timeout: typing.Optional[int]
datalake_service_timeout: typing.Optional[int]
directory_client_timeout: typing.Optional[int]
def __init__(
self,
file_client_timeout: typing.Optional[int] = None,
file_system_timeout: typing.Optional[int] = None,
datalake_service_timeout: typing.Optional[int] = None,
directory_client_timeout: typing.Optional[int] = None
):
"""
:param file_client_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeFileClient methods
:param file_system_timeout: timeout in seconds to pass to
azure.storage.filedatalake.FileSystemClient methods
:param datalake_service_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeServiceClient methods
:param directory_client_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeDirectoryClient methods
"""
self.file_client_timeout = file_client_timeout
self.file_system_timeout = file_system_timeout
self.datalake_service_timeout = datalake_service_timeout
self.directory_client_timeout = directory_client_timeout
|
(file_client_timeout: Optional[int] = None, file_system_timeout: Optional[int] = None, datalake_service_timeout: Optional[int] = None, directory_client_timeout: Optional[int] = None)
|
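A construction sketch; the timeout values below are arbitrary examples, in seconds:
timeouts = Timeouts(
    file_client_timeout=30,        # per-file read/write/delete calls
    file_system_timeout=60,        # path listing and directory create/delete
    datalake_service_timeout=60,   # account-level file system operations
    directory_client_timeout=60,   # directory renames
)
handler = AccountHandler.from_account_name('myaccount', timeouts=timeouts)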
723,459
|
pyarrowfs_adlgen2.core
|
__eq__
| null |
"""
Adapters to access Azure Data Lake gen2 storage through apache arrow
These are fairly thin wrappers around the azure storage sdk:
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/index.html
Many options in the SDK are unused. For example:
* No interaction with the lease (lock) system happens
* No tags or metadata are set on any SDK objects
* Only defaults are used for ACL/access levels (no public access for created file systems)
Instead of trying to shoehorn functionality like the above into the pyarrow.PyFileSystem API,
it is recommended to use the SDK directly for this sort of functionality.
"""
import os
import datetime
import io
import typing
import dataclasses
import azure.core.exceptions
import azure.storage.filedatalake
from azure.storage.filedatalake import ContentSettings
import pyarrow.fs
def _parse_azure_ts(last_modified):
# Mon, 17 Aug 2020 12:19:35 GMT
if isinstance(last_modified, str):
fmt = "%a, %d %b %Y %H:%M:%S %Z"
return datetime.datetime.strptime(last_modified, fmt)
else:
return last_modified
|
(self, other)
|
723,460
|
pyarrowfs_adlgen2.core
|
__init__
|
:param file_client_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeFileClient methods
:param file_system_timeout: timeout in seconds to pass to
azure.storage.filedatalake.FileSystemClient methods
:param datalake_service_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeServiceClient methods
:param directory_client_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeDirectoryClient methods
|
def __init__(
self,
file_client_timeout: typing.Optional[int] = None,
file_system_timeout: typing.Optional[int] = None,
datalake_service_timeout: typing.Optional[int] = None,
directory_client_timeout: typing.Optional[int] = None
):
"""
:param file_client_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeFileClient methods
:param file_system_timeout: timeout in seconds to pass to
azure.storage.filedatalake.FileSystemClient methods
:param datalake_service_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeServiceClient methods
:param directory_client_timeout: timeout in seconds to pass to
azure.storage.filedatalake.DataLakeDirectoryClient methods
"""
self.file_client_timeout = file_client_timeout
self.file_system_timeout = file_system_timeout
self.datalake_service_timeout = datalake_service_timeout
self.directory_client_timeout = directory_client_timeout
|
(self, file_client_timeout: Optional[int] = None, file_system_timeout: Optional[int] = None, datalake_service_timeout: Optional[int] = None, directory_client_timeout: Optional[int] = None)
|
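The four timeouts are stored as plain attributes and later threaded into the corresponding SDK calls. A minimal construction sketch; the class name Timeouts is inferred from the :class:`Timeouts` reference below and is an assumption:

import pyarrowfs_adlgen2.core as core

# Override only the clients you need; unset timeouts stay None (the SDK default).
timeouts = core.Timeouts(  # assumed class name
    file_client_timeout=30,   # seconds, for DataLakeFileClient calls
    file_system_timeout=60,   # seconds, for FileSystemClient calls
)
assert timeouts.datalake_service_timeout is None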
723,461
|
pyarrowfs_adlgen2.core
|
__repr__
| null |
def __init__(
self,
file_system_client: azure.storage.filedatalake.FileSystemClient,
prefix_fs=False,
timeouts=DEFAULT_TIMEOUTS
):
"""
:param file_system_client:
:param prefix_fs: If True, prefix the name of the file system to all generated paths
:param timeouts: :class:`Timeouts` for datalake gen2 operations
:type file_system_client: azure.storage.filedatalake.FileSystemClient
:type prefix_fs: bool
https://azuresdkdocs.blob.core.windows.net/$web/python/azure-storage-file-datalake/12.1.1/azure.storage.filedatalake.html#azure.storage.filedatalake.FileSystemClient
"""
super().__init__()
self.prefix_fs = prefix_fs
self.file_system_client = file_system_client
self.timeouts = timeouts
|
(self)
|
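The __init__ above wraps an SDK FileSystemClient so pyarrow can use it through pyarrow.fs.PyFileSystem. A hedged wiring sketch; the handler class name FilesystemHandler, the account URL, and the file system name are assumptions:

import azure.identity
import azure.storage.filedatalake
import pyarrow.fs
import pyarrowfs_adlgen2.core as core

# Build the SDK client for one file system (container).
fs_client = azure.storage.filedatalake.FileSystemClient(
    account_url='https://myaccount.dfs.core.windows.net',  # hypothetical account
    file_system_name='data',                               # hypothetical file system
    credential=azure.identity.DefaultAzureCredential(),
)
# Wrap the client in the handler whose __init__ is shown above (class name assumed),
# then adapt the handler to the pyarrow filesystem interface.
handler = core.FilesystemHandler(fs_client, prefix_fs=False)
fs = pyarrow.fs.PyFileSystem(handler)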
723,463
|
celery_once.tasks
|
AlreadyQueued
| null |
class AlreadyQueued(Exception):
def __init__(self, countdown):
self.message = "Expires in {} seconds".format(countdown)
self.countdown = countdown
|
(countdown)
|
723,464
|
celery_once.tasks
|
__init__
| null |
def __init__(self, countdown):
self.message = "Expires in {} seconds".format(countdown)
self.countdown = countdown
|
(self, countdown)
|
723,465
|
celery_once.tasks
|
QueueOnce
| null |
class QueueOnce(Task):
abstract = True
once = {
'graceful': False,
'unlock_before_run': False
}
"""
'There can be only one'. - Highlander (1986)
An abstract task with the ability to detect if it has already been queued.
When running the task (through .delay/.apply_async) it checks whether the task
is already queued. By default it will raise an
AlreadyQueued exception if it is, but you can silence this by including
`once={'graceful': True}` in apply_async or in the task's settings.
Example:
>>> from celery_once import QueueOnce
>>> from celery import task
>>> @task(base=QueueOnce, once={'graceful': True})
>>> def example(time):
>>> from time import sleep
>>> sleep(time)
"""
@property
def config(self):
app = self._get_app()
return app.conf
@property
def once_config(self):
return self.config.ONCE
@property
def once_backend(self):
return import_backend(self.once_config)
@property
def default_timeout(self):
return self.once_config['settings'].get('default_timeout', 60 * 60)
def unlock_before_run(self):
return self.once.get('unlock_before_run', False)
def __init__(self, *args, **kwargs):
        self._signature = signature(self.run)
return super(QueueOnce, self).__init__(*args, **kwargs)
def __call__(self, *args, **kwargs):
# Only clear the lock before the task's execution if the
# "unlock_before_run" option is True
if self.unlock_before_run():
key = self.get_key(args, kwargs)
self.once_backend.clear_lock(key)
return super(QueueOnce, self).__call__(*args, **kwargs)
def apply_async(self, args=None, kwargs=None, **options):
"""
Attempts to queue a task.
Raises an AlreadyQueued exception if already queued.
:param \*args: positional arguments passed on to the task.
:param \*\*kwargs: keyword arguments passed on to the task.
:keyword \*\*once: (optional)
:param: graceful: (optional)
If True, will not raise an exception if already queued;
instead an EagerResult with state REJECTED is returned.
:param: timeout: (optional)
An `int` number of seconds after which the lock will expire.
If not set, defaults to 1 hour.
:param: keys: (optional)
"""
once_options = options.get('once', {})
once_graceful = once_options.get(
'graceful', self.once.get('graceful', False))
once_timeout = once_options.get(
'timeout', self.once.get('timeout', self.default_timeout))
if not options.get('retries'):
key = self.get_key(args, kwargs)
try:
self.once_backend.raise_or_lock(key, timeout=once_timeout)
except AlreadyQueued as e:
if once_graceful:
return EagerResult(None, None, states.REJECTED)
raise e
return super(QueueOnce, self).apply_async(args, kwargs, **options)
def _get_call_args(self, args, kwargs):
        call_args = self._signature.bind(*args, **kwargs).arguments
# Remove the task instance from the kwargs. This only happens when the
# task has the 'bind' attribute set to True. We remove it, as the task
# has a memory pointer in its repr, that will change between the task
# caller and the celery worker
if isinstance(call_args.get('self'), Task):
del call_args['self']
return call_args
def get_key(self, args=None, kwargs=None):
"""
Generate the key from the name of the task (e.g. 'tasks.example') and
args/kwargs.
"""
restrict_to = self.once.get('keys', None)
args = args or {}
kwargs = kwargs or {}
call_args = self._get_call_args(args, kwargs)
key = queue_once_key(self.name, call_args, restrict_to)
return key
def after_return(self, status, retval, task_id, args, kwargs, einfo):
"""
After a task has run (whether successfully or with a failure) clear the
lock if "unlock_before_run" is False.
"""
# Only clear the lock after the task's execution if the
# "unlock_before_run" option is False
if not self.unlock_before_run():
key = self.get_key(args, kwargs)
self.once_backend.clear_lock(key)
|
(*args, **kwargs)
|
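The ONCE configuration shape consumed by the config/once_config/default_timeout properties above looks like the following. A hedged sketch; the broker URL, Redis URL, and task body are illustrative:

from time import sleep
from celery import Celery
from celery_once import QueueOnce

celery = Celery('tasks', broker='amqp://guest@localhost//')
celery.conf.ONCE = {
    'backend': 'celery_once.backends.Redis',  # lock backend loaded by import_backend
    'settings': {
        'url': 'redis://localhost:6379/0',
        'default_timeout': 60 * 60,  # read by the default_timeout property
    },
}

@celery.task(base=QueueOnce, once={'graceful': True})
def slow_add(a, b):
    sleep(30)
    return a + b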
723,467
|
celery_once.tasks
|
__call__
| null |
def __call__(self, *args, **kwargs):
# Only clear the lock before the task's execution if the
# "unlock_before_run" option is True
if self.unlock_before_run():
key = self.get_key(args, kwargs)
self.once_backend.clear_lock(key)
return super(QueueOnce, self).__call__(*args, **kwargs)
|
(self, *args, **kwargs)
|
723,468
|
celery_once.tasks
|
__init__
| null |
def __init__(self, *args, **kwargs):
    self._signature = signature(self.run)
return super(QueueOnce, self).__init__(*args, **kwargs)
|
(self, *args, **kwargs)
|
723,471
|
celery_once.tasks
|
_get_call_args
| null |
def _get_call_args(self, args, kwargs):
    call_args = self._signature.bind(*args, **kwargs).arguments
# Remove the task instance from the kwargs. This only happens when the
# task has the 'bind' attribute set to True. We remove it, as the task
# has a memory pointer in its repr, that will change between the task
# caller and the celery worker
if isinstance(call_args.get('self'), Task):
del call_args['self']
return call_args
|
(self, args, kwargs)
|
723,476
|
celery_once.tasks
|
after_return
|
After a task has run (whether successfully or with a failure) clear the
lock if "unlock_before_run" is False.
|
def after_return(self, status, retval, task_id, args, kwargs, einfo):
"""
After a task has run (whether successfully or with a failure) clear the
lock if "unlock_before_run" is False.
"""
# Only clear the lock after the task's execution if the
# "unlock_before_run" option is False
if not self.unlock_before_run():
key = self.get_key(args, kwargs)
self.once_backend.clear_lock(key)
|
(self, status, retval, task_id, args, kwargs, einfo)
|
723,478
|
celery_once.tasks
|
apply_async
|
Attempts to queue a task.
Raises an AlreadyQueued exception if already queued.
:param \*args: positional arguments passed on to the task.
:param \*\*kwargs: keyword arguments passed on to the task.
:keyword \*\*once: (optional)
:param: graceful: (optional)
If True, will not raise an exception if already queued;
instead an EagerResult with state REJECTED is returned.
:param: timeout: (optional)
An `int` number of seconds after which the lock will expire.
If not set, defaults to 1 hour.
:param: keys: (optional)
|
def apply_async(self, args=None, kwargs=None, **options):
"""
Attempts to queue a task.
Raises an AlreadyQueued exception if already queued.
:param \*args: positional arguments passed on to the task.
:param \*\*kwargs: keyword arguments passed on to the task.
:keyword \*\*once: (optional)
:param: graceful: (optional)
If True, will not raise an exception if already queued;
instead an EagerResult with state REJECTED is returned.
:param: timeout: (optional)
An `int` number of seconds after which the lock will expire.
If not set, defaults to 1 hour.
:param: keys: (optional)
"""
once_options = options.get('once', {})
once_graceful = once_options.get(
'graceful', self.once.get('graceful', False))
once_timeout = once_options.get(
'timeout', self.once.get('timeout', self.default_timeout))
if not options.get('retries'):
key = self.get_key(args, kwargs)
try:
self.once_backend.raise_or_lock(key, timeout=once_timeout)
except AlreadyQueued as e:
if once_graceful:
return EagerResult(None, None, states.REJECTED)
raise e
return super(QueueOnce, self).apply_async(args, kwargs, **options)
|
(self, args=None, kwargs=None, **options)
|
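Per-call `once` options override the task-level settings, as read by once_options at the top of the method. Continuing the hypothetical slow_add task from the earlier sketch:

# Default behaviour: raises AlreadyQueued while a duplicate holds the lock.
slow_add.apply_async(args=(1, 2))

# Graceful duplicate handling plus a two-hour lock for this call only.
slow_add.apply_async(args=(1, 2), once={'graceful': True, 'timeout': 2 * 60 * 60})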
723,482
|
celery_once.tasks
|
get_key
|
Generate the key from the name of the task (e.g. 'tasks.example') and
args/kwargs.
|
def get_key(self, args=None, kwargs=None):
"""
Generate the key from the name of the task (e.g. 'tasks.example') and
args/kwargs.
"""
restrict_to = self.once.get('keys', None)
args = args or {}
kwargs = kwargs or {}
call_args = self._get_call_args(args, kwargs)
key = queue_once_key(self.name, call_args, restrict_to)
return key
|
(self, args=None, kwargs=None)
|
723,503
|
celery_once.tasks
|
unlock_before_run
| null |
def unlock_before_run(self):
return self.once.get('unlock_before_run', False)
|
(self)
|
723,507
|
serial_bus.custom_collections
|
HashableDict
|
A ridiculously simple implementation of a hashable dictionary.
All fields in a SerialBus model must be hashable so that the model
instance itself can be hashable as well. Because Python's native dict
is not hashable, this class might come in handy for SerialBus model
fields that have to be dicts.
Optionally, code using SerialBus may supply its own implementation of a
hashable dictionary via the 'HASHABLE_DICT_CLS'
environment variable. See serial_bus.config.SerialBusConfig
class for more details.
NOTE: The requirement for a hashable dictionary is due to the fact that each
model instance will be stored in a set data structure, implemented by the
SerialBusSortedSet class.
|
class HashableDict(dict):
"""
A ridiculously simple implementation of a hashable dictionary.
All fields in a SerialBus model must be hashable so that the model
instance itself can be hashable as well. Because Python's native dict
is not hashable, this class might come in handy for SerialBus model
fields that have to be dicts.
Optionally, code using SerialBus may supply its own implementation of a
hashable dictionary via the 'HASHABLE_DICT_CLS'
environment variable. See serial_bus.config.SerialBusConfig
class for more details.
NOTE: The requirement for a hashable dictionary is due to the fact that each
model instance will be stored in a set data structure, implemented by the
SerialBusSortedSet class.
"""
def __hash__(self):
return hash((frozenset(self), frozenset(self.values())))
| null |
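Because the hash combines frozenset(self) (the keys) and frozenset(self.values()), equal dictionaries hash equally and instances can be stored in sets, provided every value is itself hashable:

from serial_bus.custom_collections import HashableDict

a = HashableDict(name='eth0', mtu=9000)
b = HashableDict(name='eth0', mtu=9000)
assert hash(a) == hash(b)   # equal contents, equal hash
assert len({a, b}) == 1     # dict equality still decides set membership

# Unhashable values (e.g. a plain list) would make hash() raise TypeError.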
723,508
|
serial_bus.custom_collections
|
__hash__
| null |
def __hash__(self):
return hash((frozenset(self), frozenset(self.values())))
|
(self)
|
723,509
|
serial_bus.models
|
SerialBusBaseModel
|
A pydantic BaseModel class that provides the basic
functionality for all SerialBus Models classes.
Any SerialBusBaseModel child class SHOULD be instantiated from
.create_from_loaded_data() or .create() class methods provided.
Instantiation by using __init__() directly is discouraged.
This class implements functools.total_ordering to allow its instances
to be sorted accordingly inside a datastore.SerialBusSortedSet.
Because datastore.SerialBusSortedSet is a python Set, all
SerialBus models MUST be hashable. This has a twofold consequence:
- Model classes must implement the __hash__() method;
- all attributes of any SerialBus model class must be hashable;
|
class SerialBusBaseModel(BaseModel):
"""A pydantic BaseModel class that provides the basic
functionality for all SerialBus Models classes.
Any SerialBusBaseModel child class SHOULD be instantiated from
.create_from_loaded_data() or .create() class methods provided.
Instantiation by using __init__() directly is discouraged.
This class implements functools.total_ordering to allow its instances
to be sorted accordingly inside a datastore.SerialBusSortedSet.
Because datastore.SerialBusSortedSet is a python Set, all
SerialBus models MUST be hashable. This has a twofold consequence:
- Model classes must implement the __hash__() method;
- all attributes of any SerialBus model class must be hashable;
"""
# this class attribute is used as a registry for all subclasses
# of this one. This is useful to allow transparent discovery of
# such classes, which in turn allows for easy mapping of directives
# to model classes.
_subclasses: ClassVar = []
# class attribute to store a reference to the global data structure
# where all model instances are stored.
_data_store: ClassVar = get_global_data_store()
# this is required to allow fields of non-native python types
model_config = ConfigDict(arbitrary_types_allowed=True)
# represents the keyword in the serialized data used to
# trigger the instantiation of this model class
_directive: str = None
# a tuple of strings with each representing the name of an attribute of
# the model class. This tuple will be used to create a unique identifier
    # that will ID this model object in the store. This will also be used as
# a sort key in the data store. Child classes MUST override this attribute.
_key: Tuple[str]
# the global data store doesn't allow for duplication, as it
# implements a set. However, SerialBus doesn't necessarily care if a
    # duplication happens. This attribute indicates whether an exception should
# be raised in case an attempted duplication is detected.
_err_on_duplicate: bool = False
def __new__(cls, *args, **kwargs):
"""Overrides the __new__ method to prevent direct instantiation of this class."""
if cls == SerialBusBaseModel:
raise ModelInitializationError(
"Cannot instantiate SerialBusBaseModel directly."
)
return super().__new__(cls)
def __init_subclass__(cls, **kwargs):
"""Registers all subclasses of this class in the _subclasses attribute."""
super().__init_subclass__(**kwargs)
cls._subclasses.append(cls)
def __hash__(self):
return hash((type(self),) + tuple(self.__dict__.values()))
    def __lt__(self, other):
        # Strict comparison: using <= here would break irreflexivity for equal keys.
        return self.key < other.key
def __eq__(self, other):
return str(self) == str(other)
@classmethod
def _normalize_for_validations(cls, dict_args: Dict[str, Any]) -> Dict[str, Any]:
"""
Converts all values of dict_args to tuples when:
1 - the value is a list object; AND
2 - the key associated with the value matches a field in the model class; AND
3 - the type associated with the field is of base type 'tuple'.
This is a best-effort attempt to ensure the hashability of the model
instance in a seamless way. Although this helps, users should ideally
guarantee hashability themselves by passing tuples instead of lists when the
field of the model class is of type tuple.
TODO: expand on this to allow greater flexibility. For now, it's just a
placeholder to ensure the dict_args is hashable. In the future, this
class should allow a custom type of 'hashable sequence' to be defined
as a means to represent ManyToOne relationships and then automatically
convert collection types (like lists or sets) to this custom type.
Returns:
Dict[str, Any]: the converted dict.
"""
converted_dict = {}
for key, value in dict_args.items():
            if (
                isinstance(value, list)
                and key in cls.__annotations__
                and getattr(cls.__annotations__[key], '__origin__', None) is tuple
            ):
value = tuple(value)
converted_dict[key] = value
return converted_dict
@property
def directive(self) -> str:
"""
Returns the value of the `_directive` attribute. This is used to
identify the model class when parsing a file of supported format.
"""
return self._directive
@property
def key(self) -> Tuple:
"""
Returns a tuple containing the values associated with the fields (attributes)
named in the `_key` attribute.
"""
return tuple([getattr(self, attr) for attr in self._key])
@classmethod
def create_from_loaded_data(
cls, data: Union[Dict[str, Any], List[Dict[str, Any]]]
) -> None:
"""Factory class method used to instantiate a model with data
previously loaded and parsed from a file.
Args:
data (Union[Dict[str, Any], List[Dict[str, Any]]]): Data loaded
from a file of supported format and parsed into a Dictionary or List.
Raises:
SerialBusTypeError: If `data` is not a dict nor a list.
"""
if not isinstance(data, dict) and not isinstance(data, list):
raise SerialBusTypeError(
f"Data passed to {cls.__name__} must be of type 'dict' or 'list', but was {type(data)}"
)
if isinstance(data, list):
for _dict in data:
cls.create(dict_args=_dict)
return
cls.create(dict_args=data)
@classmethod
def create(
cls, dict_args: Dict[Any, Any], *args, **kwargs
) -> "SerialBusBaseModel":
"""
Factory class method used to instantiate a model with data
passed as a dictionary.
Args:
dict_args (Dict[Any, Any]): a dictionary containing the data to be used
to instantiate the model.
Returns:
SerialBusBaseModel: the instance of the model created.
"""
dict_args = convert_dict_to_hashabledict(dict_args)
dict_args = cls._normalize_for_validations(dict_args)
new_obj_model = cls.model_validate(dict_args, strict=True, *args, **kwargs)
new_obj_model.ds().save(new_obj_model)
return new_obj_model
@classmethod
def ds(cls) -> ModelsGlobalStore:
"""Returns the global data store where all models are stored."""
return cls._data_store
@classmethod
def filter(cls, search_params: Dict[Any, Any]) -> SerialBusSortedSet:
"""
Returns a SerialBusSortedSet containing all instances of this
class that match the search_params.
Args:
search_params (Dict[Any, Any]): a dictionary with the key being the
attribute of the model and the value being the value to be searched for.
Returns:
SerialBusSortedSet: the SerialBusSortedSet containing the records
that match the search_params.
"""
return cls.ds().filter(cls, search_params)
@classmethod
def get(cls, search_params: Dict[Any, Any]) -> "SerialBusBaseModel":
"""
Retrieves a single instance of this class that matches the provided search parameters.
Args:
search_params (Dict[Any, Any]): A dictionary containing the search parameters. The
keys should correspond to the attribute names of the model, and the values should
correspond to the expected values of these attributes.
Returns:
SerialBusBaseModel: The model instance that matches the search parameters.
Raises:
ModelDoesNotExist: If no instance of this class matches the search parameters.
ModelAlreadyExists: If more than one instance of this class matches the search parameters.
"""
return cls.ds().get(cls, search_params)
@classmethod
def get_all(cls) -> SerialBusSortedSet:
"""Returns a SerialBusSortedSet containing all instances of this class."""
return cls.ds().get_all_by_class(cls)
@classmethod
def get_subclasses(cls):
"""Returns a list of all subclasses of this class."""
return cls._subclasses
@classmethod
def get_subclasses_with_directive(cls):
"""
Returns a list of all subclasses that have a `_directive` attribute set.
The .get_default() method is used here since sub_cls._directive is
inherently a pydantic ModelPrivateAttr.
"""
return [
sub_cls
for sub_cls in cls.get_subclasses()
if sub_cls._directive.get_default()
]
|
() -> None
|
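A minimal sketch of a hypothetical subclass; the Device model, its fields, directive, and key are all illustrative, and it assumes the global data store has been set up:

from typing import Tuple
from serial_bus.models import SerialBusBaseModel

class Device(SerialBusBaseModel):
    _directive: str = 'devices'        # keyword that maps parsed data to this model
    _key: Tuple[str] = ('hostname',)   # attributes forming the sort/identity key
    hostname: str
    os: str

# create() validates the data, saves the instance in the global store, and returns it.
device = Device.create(dict_args={'hostname': 'core-sw-01', 'os': 'ios-xe'})
assert Device.get({'hostname': 'core-sw-01'}) == device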
723,518
|
serial_bus.models
|
__hash__
| null |
def __hash__(self):
return hash((type(self),) + tuple(self.__dict__.values()))
|
(self)
|
723,522
|
serial_bus.models
|
__lt__
| null |
def __lt__(self, other):
    # Strict comparison: using <= here would break irreflexivity for equal keys.
    return self.key < other.key
|
(self, other)
|
723,523
|
serial_bus.models
|
__new__
|
Overrides the __new__ method to prevent direct instantiation of this class.
|
def __new__(cls, *args, **kwargs):
"""Overrides the __new__ method to prevent direct instantiation of this class."""
if cls == SerialBusBaseModel:
raise ModelInitializationError(
"Cannot instantiate SerialBusBaseModel directly."
)
return super().__new__(cls)
|
(cls, *args, **kwargs)
|