repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
modlinltd/django-advanced-filters
|
advanced_filters/admin.py
|
AdminAdvancedFiltersMixin.changelist_view
|
python
|
def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
response = self.adv_filters_handle(request,
extra_context=extra_context)
if response:
return response
return super(AdminAdvancedFiltersMixin, self
).changelist_view(request, extra_context=extra_context)
|
Add advanced_filters form to changelist context
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/admin.py#L93-L102
|
[
"def adv_filters_handle(self, request, extra_context={}):\n data = request.POST if request.POST.get(\n 'action') == 'advanced_filters' else None\n adv_filters_form = self.advanced_filter_form(\n data=data, model_admin=self, extra_form=True)\n extra_context.update({\n 'original_change_list_template': self.original_change_list_template,\n 'advanced_filters': adv_filters_form,\n 'current_afilter': request.GET.get('_afilter'),\n 'app_label': self.opts.app_label,\n })\n return self.save_advanced_filter(request, adv_filters_form)\n"
] |
class AdminAdvancedFiltersMixin(object):
""" Generic AdvancedFilters mixin """
advanced_change_list_template = "admin/advanced_filters.html"
advanced_filter_form = AdvancedFilterForm
def __init__(self, *args, **kwargs):
super(AdminAdvancedFiltersMixin, self).__init__(*args, **kwargs)
if self.change_list_template:
self.original_change_list_template = self.change_list_template
else:
self.original_change_list_template = "admin/change_list.html"
self.change_list_template = self.advanced_change_list_template
# add list filters to filters
self.list_filter = (AdvancedListFilters,) + tuple(self.list_filter)
def save_advanced_filter(self, request, form):
if form.is_valid():
afilter = form.save(commit=False)
afilter.created_by = request.user
afilter.query = form.generate_query()
afilter.save()
afilter.users.add(request.user)
messages.add_message(
request, messages.SUCCESS,
_('Advanced filter added successfully.')
)
if '_save_goto' in (request.GET or request.POST):
url = "{path}{qparams}".format(
path=request.path, qparams="?_afilter={id}".format(
id=afilter.id))
return HttpResponseRedirect(url)
elif request.method == "POST":
logger.info('Failed saving advanced filter, params: %s', form.data)
def adv_filters_handle(self, request, extra_context={}):
data = request.POST if request.POST.get(
'action') == 'advanced_filters' else None
adv_filters_form = self.advanced_filter_form(
data=data, model_admin=self, extra_form=True)
extra_context.update({
'original_change_list_template': self.original_change_list_template,
'advanced_filters': adv_filters_form,
'current_afilter': request.GET.get('_afilter'),
'app_label': self.opts.app_label,
})
return self.save_advanced_filter(request, adv_filters_form)
|
modlinltd/django-advanced-filters
|
advanced_filters/models.py
|
UserLookupManager.filter_by_user
|
python
|
def filter_by_user(self, user):
return self.filter(Q(users=user) | Q(groups__in=user.groups.all()))
|
All filters that should be displayed to a user (by users/group)
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/models.py#L10-L13
| null |
class UserLookupManager(models.Manager):
|
modlinltd/django-advanced-filters
|
advanced_filters/models.py
|
AdvancedFilter.query
|
python
|
def query(self):
if not self.b64_query:
return None
s = QSerializer(base64=True)
return s.loads(self.b64_query)
|
De-serialize, decode and return an ORM query stored in b64_query.
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/models.py#L39-L46
|
[
"def dumps(self, obj):\n if not isinstance(obj, Q):\n raise SerializationError\n string = json.dumps(self.serialize(obj), default=dt2ts)\n if self.b64_enabled:\n return base64.b64encode(six.b(string)).decode(\"utf-8\")\n return string\n",
"def loads(self, string, raw=False):\n if self.b64_enabled:\n d = json.loads(base64.b64decode(string))\n else:\n d = json.loads(string)\n if raw:\n return d\n return self.deserialize(d)\n"
] |
class AdvancedFilter(models.Model):
class Meta:
verbose_name = _('Advanced Filter')
verbose_name_plural = _('Advanced Filters')
title = models.CharField(max_length=255, null=False, blank=False, verbose_name=_('Title'))
created_by = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='created_advanced_filters',
verbose_name=_('Created by'),
on_delete=models.CASCADE,
)
created_at = models.DateTimeField(auto_now_add=True, null=True, verbose_name=_('Created at'))
url = models.CharField(max_length=255, null=False, blank=False, verbose_name=_('URL'))
users = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True, verbose_name=_('Users'))
groups = models.ManyToManyField('auth.Group', blank=True, verbose_name=_('Groups'))
objects = UserLookupManager()
b64_query = models.CharField(max_length=2048)
model = models.CharField(max_length=64, blank=True, null=True)
@property
@query.setter
def query(self, value):
"""
Serialize an ORM query, Base-64 encode it and set it to
the b64_query field
"""
if not isinstance(value, Q):
raise Exception('Must only be passed a Django (Q)uery object')
s = QSerializer(base64=True)
self.b64_query = s.dumps(value)
def list_fields(self):
s = QSerializer(base64=True)
d = s.loads(self.b64_query, raw=True)
return s.get_field_values_list(d)
|
modlinltd/django-advanced-filters
|
advanced_filters/models.py
|
AdvancedFilter.query
|
python
|
def query(self, value):
if not isinstance(value, Q):
raise Exception('Must only be passed a Django (Q)uery object')
s = QSerializer(base64=True)
self.b64_query = s.dumps(value)
|
Serialize an ORM query, Base-64 encode it and set it to
the b64_query field
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/models.py#L49-L57
|
[
"def dumps(self, obj):\n if not isinstance(obj, Q):\n raise SerializationError\n string = json.dumps(self.serialize(obj), default=dt2ts)\n if self.b64_enabled:\n return base64.b64encode(six.b(string)).decode(\"utf-8\")\n return string\n",
"def loads(self, string, raw=False):\n if self.b64_enabled:\n d = json.loads(base64.b64decode(string))\n else:\n d = json.loads(string)\n if raw:\n return d\n return self.deserialize(d)\n"
] |
class AdvancedFilter(models.Model):
class Meta:
verbose_name = _('Advanced Filter')
verbose_name_plural = _('Advanced Filters')
title = models.CharField(max_length=255, null=False, blank=False, verbose_name=_('Title'))
created_by = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='created_advanced_filters',
verbose_name=_('Created by'),
on_delete=models.CASCADE,
)
created_at = models.DateTimeField(auto_now_add=True, null=True, verbose_name=_('Created at'))
url = models.CharField(max_length=255, null=False, blank=False, verbose_name=_('URL'))
users = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True, verbose_name=_('Users'))
groups = models.ManyToManyField('auth.Group', blank=True, verbose_name=_('Groups'))
objects = UserLookupManager()
b64_query = models.CharField(max_length=2048)
model = models.CharField(max_length=64, blank=True, null=True)
@property
def query(self):
"""
De-serialize, decode and return an ORM query stored in b64_query.
"""
if not self.b64_query:
return None
s = QSerializer(base64=True)
return s.loads(self.b64_query)
@query.setter
def list_fields(self):
s = QSerializer(base64=True)
d = s.loads(self.b64_query, raw=True)
return s.get_field_values_list(d)
|
modlinltd/django-advanced-filters
|
advanced_filters/q_serializer.py
|
QSerializer.serialize
|
python
|
def serialize(self, q):
children = []
for child in q.children:
if isinstance(child, Q):
children.append(self.serialize(child))
else:
children.append(child)
serialized = q.__dict__
serialized['children'] = children
return serialized
|
Serialize a Q object into a (possibly nested) dict.
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/q_serializer.py#L55-L67
|
[
"def serialize(self, q):\n \"\"\"\n Serialize a Q object into a (possibly nested) dict.\n \"\"\"\n children = []\n for child in q.children:\n if isinstance(child, Q):\n children.append(self.serialize(child))\n else:\n children.append(child)\n serialized = q.__dict__\n serialized['children'] = children\n return serialized\n"
] |
class QSerializer(object):
"""
A Q object serializer base class. Pass base64=True when initializing
to Base-64 encode/decode the returned/passed string.
By default the class provides loads/dumps methods that wrap around
json serialization, but they may be easily overwritten to serialize
into other formats (i.e XML, YAML, etc...)
"""
b64_enabled = False
def __init__(self, base64=False):
if base64:
self.b64_enabled = True
@staticmethod
def _is_range(qtuple):
return qtuple[0].endswith("__range") and len(qtuple[1]) == 2
def prepare_value(self, qtuple):
if self._is_range(qtuple):
qtuple[1][0] = qtuple[1][0] or min_ts
qtuple[1][1] = qtuple[1][1] or max_ts
qtuple[1] = (datetime.fromtimestamp(qtuple[1][0]),
datetime.fromtimestamp(qtuple[1][1]))
return qtuple
def deserialize(self, d):
"""
De-serialize a Q object from a (possibly nested) dict.
"""
children = []
for child in d.pop('children'):
if isinstance(child, dict):
children.append(self.deserialize(child))
else:
children.append(self.prepare_value(child))
query = Q()
query.children = children
query.connector = d['connector']
query.negated = d['negated']
if 'subtree_parents' in d:
query.subtree_parents = d['subtree_parents']
return query
def get_field_values_list(self, d):
"""
Iterate over a (possibly nested) dict, and return a list
of all children queries, as a dict of the following structure:
{
'field': 'some_field__iexact',
'value': 'some_value',
'value_from': 'optional_range_val1',
'value_to': 'optional_range_val2',
'negate': True,
}
OR relations are expressed as an extra "line" between queries.
"""
fields = []
children = d.get('children', [])
for child in children:
if isinstance(child, dict):
fields.extend(self.get_field_values_list(child))
else:
f = {'field': child[0], 'value': child[1]}
if self._is_range(child):
f['value_from'] = child[1][0]
f['value_to'] = child[1][1]
f['negate'] = d.get('negated', False)
fields.append(f)
# add _OR line
if d['connector'] == 'OR' and children[-1] != child:
fields.append({'field': '_OR', 'value': 'null'})
return fields
def dumps(self, obj):
if not isinstance(obj, Q):
raise SerializationError
string = json.dumps(self.serialize(obj), default=dt2ts)
if self.b64_enabled:
return base64.b64encode(six.b(string)).decode("utf-8")
return string
def loads(self, string, raw=False):
if self.b64_enabled:
d = json.loads(base64.b64decode(string))
else:
d = json.loads(string)
if raw:
return d
return self.deserialize(d)
|
modlinltd/django-advanced-filters
|
advanced_filters/q_serializer.py
|
QSerializer.deserialize
|
python
|
def deserialize(self, d):
children = []
for child in d.pop('children'):
if isinstance(child, dict):
children.append(self.deserialize(child))
else:
children.append(self.prepare_value(child))
query = Q()
query.children = children
query.connector = d['connector']
query.negated = d['negated']
if 'subtree_parents' in d:
query.subtree_parents = d['subtree_parents']
return query
|
De-serialize a Q object from a (possibly nested) dict.
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/q_serializer.py#L69-L85
|
[
"def prepare_value(self, qtuple):\n if self._is_range(qtuple):\n qtuple[1][0] = qtuple[1][0] or min_ts\n qtuple[1][1] = qtuple[1][1] or max_ts\n qtuple[1] = (datetime.fromtimestamp(qtuple[1][0]),\n datetime.fromtimestamp(qtuple[1][1]))\n return qtuple\n",
"def deserialize(self, d):\n \"\"\"\n De-serialize a Q object from a (possibly nested) dict.\n \"\"\"\n children = []\n for child in d.pop('children'):\n if isinstance(child, dict):\n children.append(self.deserialize(child))\n else:\n children.append(self.prepare_value(child))\n query = Q()\n query.children = children\n query.connector = d['connector']\n query.negated = d['negated']\n if 'subtree_parents' in d:\n query.subtree_parents = d['subtree_parents']\n return query\n"
] |
class QSerializer(object):
"""
A Q object serializer base class. Pass base64=True when initializing
to Base-64 encode/decode the returned/passed string.
By default the class provides loads/dumps methods that wrap around
json serialization, but they may be easily overwritten to serialize
into other formats (i.e XML, YAML, etc...)
"""
b64_enabled = False
def __init__(self, base64=False):
if base64:
self.b64_enabled = True
@staticmethod
def _is_range(qtuple):
return qtuple[0].endswith("__range") and len(qtuple[1]) == 2
def prepare_value(self, qtuple):
if self._is_range(qtuple):
qtuple[1][0] = qtuple[1][0] or min_ts
qtuple[1][1] = qtuple[1][1] or max_ts
qtuple[1] = (datetime.fromtimestamp(qtuple[1][0]),
datetime.fromtimestamp(qtuple[1][1]))
return qtuple
def serialize(self, q):
"""
Serialize a Q object into a (possibly nested) dict.
"""
children = []
for child in q.children:
if isinstance(child, Q):
children.append(self.serialize(child))
else:
children.append(child)
serialized = q.__dict__
serialized['children'] = children
return serialized
def get_field_values_list(self, d):
"""
Iterate over a (possibly nested) dict, and return a list
of all children queries, as a dict of the following structure:
{
'field': 'some_field__iexact',
'value': 'some_value',
'value_from': 'optional_range_val1',
'value_to': 'optional_range_val2',
'negate': True,
}
OR relations are expressed as an extra "line" between queries.
"""
fields = []
children = d.get('children', [])
for child in children:
if isinstance(child, dict):
fields.extend(self.get_field_values_list(child))
else:
f = {'field': child[0], 'value': child[1]}
if self._is_range(child):
f['value_from'] = child[1][0]
f['value_to'] = child[1][1]
f['negate'] = d.get('negated', False)
fields.append(f)
# add _OR line
if d['connector'] == 'OR' and children[-1] != child:
fields.append({'field': '_OR', 'value': 'null'})
return fields
def dumps(self, obj):
if not isinstance(obj, Q):
raise SerializationError
string = json.dumps(self.serialize(obj), default=dt2ts)
if self.b64_enabled:
return base64.b64encode(six.b(string)).decode("utf-8")
return string
def loads(self, string, raw=False):
if self.b64_enabled:
d = json.loads(base64.b64decode(string))
else:
d = json.loads(string)
if raw:
return d
return self.deserialize(d)
|
modlinltd/django-advanced-filters
|
advanced_filters/q_serializer.py
|
QSerializer.get_field_values_list
|
python
|
def get_field_values_list(self, d):
fields = []
children = d.get('children', [])
for child in children:
if isinstance(child, dict):
fields.extend(self.get_field_values_list(child))
else:
f = {'field': child[0], 'value': child[1]}
if self._is_range(child):
f['value_from'] = child[1][0]
f['value_to'] = child[1][1]
f['negate'] = d.get('negated', False)
fields.append(f)
# add _OR line
if d['connector'] == 'OR' and children[-1] != child:
fields.append({'field': '_OR', 'value': 'null'})
return fields
|
Iterate over a (possibly nested) dict, and return a list
of all children queries, as a dict of the following structure:
{
'field': 'some_field__iexact',
'value': 'some_value',
'value_from': 'optional_range_val1',
'value_to': 'optional_range_val2',
'negate': True,
}
OR relations are expressed as an extra "line" between queries.
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/q_serializer.py#L87-L117
|
[
"def _is_range(qtuple):\n return qtuple[0].endswith(\"__range\") and len(qtuple[1]) == 2\n",
"def get_field_values_list(self, d):\n \"\"\"\n Iterate over a (possibly nested) dict, and return a list\n of all children queries, as a dict of the following structure:\n {\n 'field': 'some_field__iexact',\n 'value': 'some_value',\n 'value_from': 'optional_range_val1',\n 'value_to': 'optional_range_val2',\n 'negate': True,\n }\n\n OR relations are expressed as an extra \"line\" between queries.\n \"\"\"\n fields = []\n children = d.get('children', [])\n for child in children:\n if isinstance(child, dict):\n fields.extend(self.get_field_values_list(child))\n else:\n f = {'field': child[0], 'value': child[1]}\n if self._is_range(child):\n f['value_from'] = child[1][0]\n f['value_to'] = child[1][1]\n f['negate'] = d.get('negated', False)\n fields.append(f)\n\n # add _OR line\n if d['connector'] == 'OR' and children[-1] != child:\n fields.append({'field': '_OR', 'value': 'null'})\n return fields\n"
] |
class QSerializer(object):
"""
A Q object serializer base class. Pass base64=True when initializing
to Base-64 encode/decode the returned/passed string.
By default the class provides loads/dumps methods that wrap around
json serialization, but they may be easily overwritten to serialize
into other formats (i.e XML, YAML, etc...)
"""
b64_enabled = False
def __init__(self, base64=False):
if base64:
self.b64_enabled = True
@staticmethod
def _is_range(qtuple):
return qtuple[0].endswith("__range") and len(qtuple[1]) == 2
def prepare_value(self, qtuple):
if self._is_range(qtuple):
qtuple[1][0] = qtuple[1][0] or min_ts
qtuple[1][1] = qtuple[1][1] or max_ts
qtuple[1] = (datetime.fromtimestamp(qtuple[1][0]),
datetime.fromtimestamp(qtuple[1][1]))
return qtuple
def serialize(self, q):
"""
Serialize a Q object into a (possibly nested) dict.
"""
children = []
for child in q.children:
if isinstance(child, Q):
children.append(self.serialize(child))
else:
children.append(child)
serialized = q.__dict__
serialized['children'] = children
return serialized
def deserialize(self, d):
"""
De-serialize a Q object from a (possibly nested) dict.
"""
children = []
for child in d.pop('children'):
if isinstance(child, dict):
children.append(self.deserialize(child))
else:
children.append(self.prepare_value(child))
query = Q()
query.children = children
query.connector = d['connector']
query.negated = d['negated']
if 'subtree_parents' in d:
query.subtree_parents = d['subtree_parents']
return query
def dumps(self, obj):
if not isinstance(obj, Q):
raise SerializationError
string = json.dumps(self.serialize(obj), default=dt2ts)
if self.b64_enabled:
return base64.b64encode(six.b(string)).decode("utf-8")
return string
def loads(self, string, raw=False):
if self.b64_enabled:
d = json.loads(base64.b64decode(string))
else:
d = json.loads(string)
if raw:
return d
return self.deserialize(d)
|
modlinltd/django-advanced-filters
|
advanced_filters/form_helpers.py
|
VaryingTypeCharField.to_python
|
python
|
def to_python(self, value):
res = super(VaryingTypeCharField, self).to_python(value)
split_res = res.split(self._default_separator)
if not res or len(split_res) < 2:
return res.strip()
# create a regex string out of the list of choices passed, i.e: (a|b)
res = r"({pattern})".format(pattern="|".join(
map(lambda x: x.strip(), split_res)))
return res
|
Split a string value by separator (default to ",") into a
list; then, returns a regex pattern string that ORs the values
in the resulting list.
>>> field = VaryingTypeCharField()
>>> assert field.to_python('') == ''
>>> assert field.to_python('test') == 'test'
>>> assert field.to_python('and,me') == '(and|me)'
>>> assert field.to_python('and,me;too') == '(and|me;too)'
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/form_helpers.py#L20-L40
| null |
class VaryingTypeCharField(forms.CharField):
"""
This CharField subclass returns a regex OR patterns from a
comma separated list value.
"""
_default_separator = ","
|
modlinltd/django-advanced-filters
|
advanced_filters/form_helpers.py
|
CleanWhiteSpacesMixin.clean
|
python
|
def clean(self):
cleaned_data = super(CleanWhiteSpacesMixin, self).clean()
for k in self.cleaned_data:
if isinstance(self.cleaned_data[k], six.string_types):
cleaned_data[k] = re.sub(extra_spaces_pattern, ' ',
self.cleaned_data[k] or '').strip()
return cleaned_data
|
>>> import django.forms
>>> class MyForm(CleanWhiteSpacesMixin, django.forms.Form):
... some_field = django.forms.CharField()
>>>
>>> form = MyForm({'some_field': ' a weird value '})
>>> assert form.is_valid()
>>> assert form.cleaned_data == {'some_field': 'a weird value'}
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/form_helpers.py#L48-L63
| null |
class CleanWhiteSpacesMixin(object):
"""
This mixin, when added to any form subclass, adds a clean method which
strips repeating spaces in and around each string value of "clean_data".
"""
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterQueryForm._build_field_choices
|
python
|
def _build_field_choices(self, fields):
return tuple(sorted(
[(fquery, capfirst(fname)) for fquery, fname in fields.items()],
key=lambda f: f[1].lower())
) + self.FIELD_CHOICES
|
Iterate over passed model fields tuple and update initial choices.
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L77-L84
| null |
class AdvancedFilterQueryForm(CleanWhiteSpacesMixin, forms.Form):
""" Build the query from field, operator and value """
OPERATORS = (
("iexact", _("Equals")),
("icontains", _("Contains")),
("iregex", _("One of")),
("range", _("DateTime Range")),
("isnull", _("Is NULL")),
("istrue", _("Is TRUE")),
("isfalse", _("Is FALSE")),
("lt", _("Less Than")),
("gt", _("Greater Than")),
("lte", _("Less Than or Equal To")),
("gte", _("Greater Than or Equal To")),
)
FIELD_CHOICES = (
("_OR", _("Or (mark an or between blocks)")),
)
field = forms.ChoiceField(required=True, widget=forms.Select(
attrs={'class': 'query-field'}), label=_('Field'))
operator = forms.ChoiceField(
label=_('Operator'),
required=True, choices=OPERATORS, initial="iexact",
widget=forms.Select(attrs={'class': 'query-operator'}))
value = VaryingTypeCharField(required=True, widget=forms.TextInput(
attrs={'class': 'query-value'}), label=_('Value'))
value_from = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-from'}), required=False)
value_to = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-to'}), required=False)
negate = forms.BooleanField(initial=False, required=False, label=_('Negate'))
def _build_query_dict(self, formdata=None):
"""
Take submitted data from form and create a query dict to be
used in a Q object (or filter)
"""
if self.is_valid() and formdata is None:
formdata = self.cleaned_data
key = "{field}__{operator}".format(**formdata)
if formdata['operator'] == "isnull":
return {key: None}
elif formdata['operator'] == "istrue":
return {formdata['field']: True}
elif formdata['operator'] == "isfalse":
return {formdata['field']: False}
return {key: formdata['value']}
@staticmethod
def _parse_query_dict(query_data, model):
"""
Take a list of query field dict and return data for form initialization
"""
operator = 'iexact'
if query_data['field'] == '_OR':
query_data['operator'] = operator
return query_data
parts = query_data['field'].split('__')
if len(parts) < 2:
field = parts[0]
else:
if parts[-1] in dict(AdvancedFilterQueryForm.OPERATORS).keys():
field = '__'.join(parts[:-1])
operator = parts[-1]
else:
field = query_data['field']
query_data['field'] = field
mfield = get_fields_from_path(model, query_data['field'])
if not mfield:
raise Exception('Field path "%s" could not be followed to a field'
' in model %s', query_data['field'], model)
else:
mfield = mfield[-1] # get the field object
if query_data['value'] is None:
query_data['operator'] = "isnull"
elif query_data['value'] is True:
query_data['operator'] = "istrue"
elif query_data['value'] is False:
query_data['operator'] = "isfalse"
else:
if isinstance(mfield, DateField):
# this is a date/datetime field
query_data['operator'] = "range" # default
else:
query_data['operator'] = operator # default
if isinstance(query_data.get('value'),
list) and query_data['operator'] == 'range':
date_from = date_to_string(query_data.get('value_from'))
date_to = date_to_string(query_data.get('value_to'))
query_data['value'] = ','.join([date_from, date_to])
return query_data
def set_range_value(self, data):
"""
Validates date range by parsing into 2 datetime objects and
validating them both.
"""
dtfrom = data.pop('value_from')
dtto = data.pop('value_to')
if dtfrom is dtto is None:
self.errors['value'] = ['Date range requires values']
raise forms.ValidationError([])
data['value'] = (dtfrom, dtto)
def clean(self):
cleaned_data = super(AdvancedFilterQueryForm, self).clean()
if cleaned_data.get('operator') == "range":
if ('value_from' in cleaned_data and
'value_to' in cleaned_data):
self.set_range_value(cleaned_data)
return cleaned_data
def make_query(self, *args, **kwargs):
""" Returns a Q object from the submitted form """
query = Q() # initial is an empty query
query_dict = self._build_query_dict(self.cleaned_data)
if 'negate' in self.cleaned_data and self.cleaned_data['negate']:
query = query & ~Q(**query_dict)
else:
query = query & Q(**query_dict)
return query
def __init__(self, model_fields={}, *args, **kwargs):
super(AdvancedFilterQueryForm, self).__init__(*args, **kwargs)
self.FIELD_CHOICES = self._build_field_choices(model_fields)
self.fields['field'].choices = self.FIELD_CHOICES
if not self.fields['field'].initial:
self.fields['field'].initial = self.FIELD_CHOICES[0]
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterQueryForm._build_query_dict
|
python
|
def _build_query_dict(self, formdata=None):
if self.is_valid() and formdata is None:
formdata = self.cleaned_data
key = "{field}__{operator}".format(**formdata)
if formdata['operator'] == "isnull":
return {key: None}
elif formdata['operator'] == "istrue":
return {formdata['field']: True}
elif formdata['operator'] == "isfalse":
return {formdata['field']: False}
return {key: formdata['value']}
|
Take submitted data from form and create a query dict to be
used in a Q object (or filter)
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L86-L100
| null |
class AdvancedFilterQueryForm(CleanWhiteSpacesMixin, forms.Form):
""" Build the query from field, operator and value """
OPERATORS = (
("iexact", _("Equals")),
("icontains", _("Contains")),
("iregex", _("One of")),
("range", _("DateTime Range")),
("isnull", _("Is NULL")),
("istrue", _("Is TRUE")),
("isfalse", _("Is FALSE")),
("lt", _("Less Than")),
("gt", _("Greater Than")),
("lte", _("Less Than or Equal To")),
("gte", _("Greater Than or Equal To")),
)
FIELD_CHOICES = (
("_OR", _("Or (mark an or between blocks)")),
)
field = forms.ChoiceField(required=True, widget=forms.Select(
attrs={'class': 'query-field'}), label=_('Field'))
operator = forms.ChoiceField(
label=_('Operator'),
required=True, choices=OPERATORS, initial="iexact",
widget=forms.Select(attrs={'class': 'query-operator'}))
value = VaryingTypeCharField(required=True, widget=forms.TextInput(
attrs={'class': 'query-value'}), label=_('Value'))
value_from = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-from'}), required=False)
value_to = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-to'}), required=False)
negate = forms.BooleanField(initial=False, required=False, label=_('Negate'))
def _build_field_choices(self, fields):
"""
Iterate over passed model fields tuple and update initial choices.
"""
return tuple(sorted(
[(fquery, capfirst(fname)) for fquery, fname in fields.items()],
key=lambda f: f[1].lower())
) + self.FIELD_CHOICES
@staticmethod
def _parse_query_dict(query_data, model):
"""
Take a list of query field dict and return data for form initialization
"""
operator = 'iexact'
if query_data['field'] == '_OR':
query_data['operator'] = operator
return query_data
parts = query_data['field'].split('__')
if len(parts) < 2:
field = parts[0]
else:
if parts[-1] in dict(AdvancedFilterQueryForm.OPERATORS).keys():
field = '__'.join(parts[:-1])
operator = parts[-1]
else:
field = query_data['field']
query_data['field'] = field
mfield = get_fields_from_path(model, query_data['field'])
if not mfield:
raise Exception('Field path "%s" could not be followed to a field'
' in model %s', query_data['field'], model)
else:
mfield = mfield[-1] # get the field object
if query_data['value'] is None:
query_data['operator'] = "isnull"
elif query_data['value'] is True:
query_data['operator'] = "istrue"
elif query_data['value'] is False:
query_data['operator'] = "isfalse"
else:
if isinstance(mfield, DateField):
# this is a date/datetime field
query_data['operator'] = "range" # default
else:
query_data['operator'] = operator # default
if isinstance(query_data.get('value'),
list) and query_data['operator'] == 'range':
date_from = date_to_string(query_data.get('value_from'))
date_to = date_to_string(query_data.get('value_to'))
query_data['value'] = ','.join([date_from, date_to])
return query_data
def set_range_value(self, data):
"""
Validates date range by parsing into 2 datetime objects and
validating them both.
"""
dtfrom = data.pop('value_from')
dtto = data.pop('value_to')
if dtfrom is dtto is None:
self.errors['value'] = ['Date range requires values']
raise forms.ValidationError([])
data['value'] = (dtfrom, dtto)
def clean(self):
cleaned_data = super(AdvancedFilterQueryForm, self).clean()
if cleaned_data.get('operator') == "range":
if ('value_from' in cleaned_data and
'value_to' in cleaned_data):
self.set_range_value(cleaned_data)
return cleaned_data
def make_query(self, *args, **kwargs):
""" Returns a Q object from the submitted form """
query = Q() # initial is an empty query
query_dict = self._build_query_dict(self.cleaned_data)
if 'negate' in self.cleaned_data and self.cleaned_data['negate']:
query = query & ~Q(**query_dict)
else:
query = query & Q(**query_dict)
return query
def __init__(self, model_fields={}, *args, **kwargs):
super(AdvancedFilterQueryForm, self).__init__(*args, **kwargs)
self.FIELD_CHOICES = self._build_field_choices(model_fields)
self.fields['field'].choices = self.FIELD_CHOICES
if not self.fields['field'].initial:
self.fields['field'].initial = self.FIELD_CHOICES[0]
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterQueryForm._parse_query_dict
|
python
|
def _parse_query_dict(query_data, model):
operator = 'iexact'
if query_data['field'] == '_OR':
query_data['operator'] = operator
return query_data
parts = query_data['field'].split('__')
if len(parts) < 2:
field = parts[0]
else:
if parts[-1] in dict(AdvancedFilterQueryForm.OPERATORS).keys():
field = '__'.join(parts[:-1])
operator = parts[-1]
else:
field = query_data['field']
query_data['field'] = field
mfield = get_fields_from_path(model, query_data['field'])
if not mfield:
raise Exception('Field path "%s" could not be followed to a field'
' in model %s', query_data['field'], model)
else:
mfield = mfield[-1] # get the field object
if query_data['value'] is None:
query_data['operator'] = "isnull"
elif query_data['value'] is True:
query_data['operator'] = "istrue"
elif query_data['value'] is False:
query_data['operator'] = "isfalse"
else:
if isinstance(mfield, DateField):
# this is a date/datetime field
query_data['operator'] = "range" # default
else:
query_data['operator'] = operator # default
if isinstance(query_data.get('value'),
list) and query_data['operator'] == 'range':
date_from = date_to_string(query_data.get('value_from'))
date_to = date_to_string(query_data.get('value_to'))
query_data['value'] = ','.join([date_from, date_to])
return query_data
|
Take a list of query field dict and return data for form initialization
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L103-L149
| null |
class AdvancedFilterQueryForm(CleanWhiteSpacesMixin, forms.Form):
""" Build the query from field, operator and value """
OPERATORS = (
("iexact", _("Equals")),
("icontains", _("Contains")),
("iregex", _("One of")),
("range", _("DateTime Range")),
("isnull", _("Is NULL")),
("istrue", _("Is TRUE")),
("isfalse", _("Is FALSE")),
("lt", _("Less Than")),
("gt", _("Greater Than")),
("lte", _("Less Than or Equal To")),
("gte", _("Greater Than or Equal To")),
)
FIELD_CHOICES = (
("_OR", _("Or (mark an or between blocks)")),
)
field = forms.ChoiceField(required=True, widget=forms.Select(
attrs={'class': 'query-field'}), label=_('Field'))
operator = forms.ChoiceField(
label=_('Operator'),
required=True, choices=OPERATORS, initial="iexact",
widget=forms.Select(attrs={'class': 'query-operator'}))
value = VaryingTypeCharField(required=True, widget=forms.TextInput(
attrs={'class': 'query-value'}), label=_('Value'))
value_from = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-from'}), required=False)
value_to = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-to'}), required=False)
negate = forms.BooleanField(initial=False, required=False, label=_('Negate'))
def _build_field_choices(self, fields):
"""
Iterate over passed model fields tuple and update initial choices.
"""
return tuple(sorted(
[(fquery, capfirst(fname)) for fquery, fname in fields.items()],
key=lambda f: f[1].lower())
) + self.FIELD_CHOICES
def _build_query_dict(self, formdata=None):
"""
Take submitted data from form and create a query dict to be
used in a Q object (or filter)
"""
if self.is_valid() and formdata is None:
formdata = self.cleaned_data
key = "{field}__{operator}".format(**formdata)
if formdata['operator'] == "isnull":
return {key: None}
elif formdata['operator'] == "istrue":
return {formdata['field']: True}
elif formdata['operator'] == "isfalse":
return {formdata['field']: False}
return {key: formdata['value']}
@staticmethod
def set_range_value(self, data):
"""
Validates date range by parsing into 2 datetime objects and
validating them both.
"""
dtfrom = data.pop('value_from')
dtto = data.pop('value_to')
if dtfrom is dtto is None:
self.errors['value'] = ['Date range requires values']
raise forms.ValidationError([])
data['value'] = (dtfrom, dtto)
def clean(self):
cleaned_data = super(AdvancedFilterQueryForm, self).clean()
if cleaned_data.get('operator') == "range":
if ('value_from' in cleaned_data and
'value_to' in cleaned_data):
self.set_range_value(cleaned_data)
return cleaned_data
def make_query(self, *args, **kwargs):
""" Returns a Q object from the submitted form """
query = Q() # initial is an empty query
query_dict = self._build_query_dict(self.cleaned_data)
if 'negate' in self.cleaned_data and self.cleaned_data['negate']:
query = query & ~Q(**query_dict)
else:
query = query & Q(**query_dict)
return query
def __init__(self, model_fields={}, *args, **kwargs):
super(AdvancedFilterQueryForm, self).__init__(*args, **kwargs)
self.FIELD_CHOICES = self._build_field_choices(model_fields)
self.fields['field'].choices = self.FIELD_CHOICES
if not self.fields['field'].initial:
self.fields['field'].initial = self.FIELD_CHOICES[0]
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterQueryForm.set_range_value
|
python
|
def set_range_value(self, data):
dtfrom = data.pop('value_from')
dtto = data.pop('value_to')
if dtfrom is dtto is None:
self.errors['value'] = ['Date range requires values']
raise forms.ValidationError([])
data['value'] = (dtfrom, dtto)
|
Validates date range by parsing into 2 datetime objects and
validating them both.
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L151-L161
| null |
class AdvancedFilterQueryForm(CleanWhiteSpacesMixin, forms.Form):
""" Build the query from field, operator and value """
OPERATORS = (
("iexact", _("Equals")),
("icontains", _("Contains")),
("iregex", _("One of")),
("range", _("DateTime Range")),
("isnull", _("Is NULL")),
("istrue", _("Is TRUE")),
("isfalse", _("Is FALSE")),
("lt", _("Less Than")),
("gt", _("Greater Than")),
("lte", _("Less Than or Equal To")),
("gte", _("Greater Than or Equal To")),
)
FIELD_CHOICES = (
("_OR", _("Or (mark an or between blocks)")),
)
field = forms.ChoiceField(required=True, widget=forms.Select(
attrs={'class': 'query-field'}), label=_('Field'))
operator = forms.ChoiceField(
label=_('Operator'),
required=True, choices=OPERATORS, initial="iexact",
widget=forms.Select(attrs={'class': 'query-operator'}))
value = VaryingTypeCharField(required=True, widget=forms.TextInput(
attrs={'class': 'query-value'}), label=_('Value'))
value_from = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-from'}), required=False)
value_to = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-to'}), required=False)
negate = forms.BooleanField(initial=False, required=False, label=_('Negate'))
def _build_field_choices(self, fields):
"""
Iterate over passed model fields tuple and update initial choices.
"""
return tuple(sorted(
[(fquery, capfirst(fname)) for fquery, fname in fields.items()],
key=lambda f: f[1].lower())
) + self.FIELD_CHOICES
def _build_query_dict(self, formdata=None):
"""
Take submitted data from form and create a query dict to be
used in a Q object (or filter)
"""
if self.is_valid() and formdata is None:
formdata = self.cleaned_data
key = "{field}__{operator}".format(**formdata)
if formdata['operator'] == "isnull":
return {key: None}
elif formdata['operator'] == "istrue":
return {formdata['field']: True}
elif formdata['operator'] == "isfalse":
return {formdata['field']: False}
return {key: formdata['value']}
@staticmethod
def _parse_query_dict(query_data, model):
"""
Take a list of query field dict and return data for form initialization
"""
operator = 'iexact'
if query_data['field'] == '_OR':
query_data['operator'] = operator
return query_data
parts = query_data['field'].split('__')
if len(parts) < 2:
field = parts[0]
else:
if parts[-1] in dict(AdvancedFilterQueryForm.OPERATORS).keys():
field = '__'.join(parts[:-1])
operator = parts[-1]
else:
field = query_data['field']
query_data['field'] = field
mfield = get_fields_from_path(model, query_data['field'])
if not mfield:
raise Exception('Field path "%s" could not be followed to a field'
' in model %s', query_data['field'], model)
else:
mfield = mfield[-1] # get the field object
if query_data['value'] is None:
query_data['operator'] = "isnull"
elif query_data['value'] is True:
query_data['operator'] = "istrue"
elif query_data['value'] is False:
query_data['operator'] = "isfalse"
else:
if isinstance(mfield, DateField):
# this is a date/datetime field
query_data['operator'] = "range" # default
else:
query_data['operator'] = operator # default
if isinstance(query_data.get('value'),
list) and query_data['operator'] == 'range':
date_from = date_to_string(query_data.get('value_from'))
date_to = date_to_string(query_data.get('value_to'))
query_data['value'] = ','.join([date_from, date_to])
return query_data
def clean(self):
cleaned_data = super(AdvancedFilterQueryForm, self).clean()
if cleaned_data.get('operator') == "range":
if ('value_from' in cleaned_data and
'value_to' in cleaned_data):
self.set_range_value(cleaned_data)
return cleaned_data
def make_query(self, *args, **kwargs):
""" Returns a Q object from the submitted form """
query = Q() # initial is an empty query
query_dict = self._build_query_dict(self.cleaned_data)
if 'negate' in self.cleaned_data and self.cleaned_data['negate']:
query = query & ~Q(**query_dict)
else:
query = query & Q(**query_dict)
return query
def __init__(self, model_fields={}, *args, **kwargs):
super(AdvancedFilterQueryForm, self).__init__(*args, **kwargs)
self.FIELD_CHOICES = self._build_field_choices(model_fields)
self.fields['field'].choices = self.FIELD_CHOICES
if not self.fields['field'].initial:
self.fields['field'].initial = self.FIELD_CHOICES[0]
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterQueryForm.make_query
|
python
|
def make_query(self, *args, **kwargs):
query = Q() # initial is an empty query
query_dict = self._build_query_dict(self.cleaned_data)
if 'negate' in self.cleaned_data and self.cleaned_data['negate']:
query = query & ~Q(**query_dict)
else:
query = query & Q(**query_dict)
return query
|
Returns a Q object from the submitted form
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L171-L179
|
[
"def _build_query_dict(self, formdata=None):\n \"\"\"\n Take submitted data from form and create a query dict to be\n used in a Q object (or filter)\n \"\"\"\n if self.is_valid() and formdata is None:\n formdata = self.cleaned_data\n key = \"{field}__{operator}\".format(**formdata)\n if formdata['operator'] == \"isnull\":\n return {key: None}\n elif formdata['operator'] == \"istrue\":\n return {formdata['field']: True}\n elif formdata['operator'] == \"isfalse\":\n return {formdata['field']: False}\n return {key: formdata['value']}\n"
] |
class AdvancedFilterQueryForm(CleanWhiteSpacesMixin, forms.Form):
""" Build the query from field, operator and value """
OPERATORS = (
("iexact", _("Equals")),
("icontains", _("Contains")),
("iregex", _("One of")),
("range", _("DateTime Range")),
("isnull", _("Is NULL")),
("istrue", _("Is TRUE")),
("isfalse", _("Is FALSE")),
("lt", _("Less Than")),
("gt", _("Greater Than")),
("lte", _("Less Than or Equal To")),
("gte", _("Greater Than or Equal To")),
)
FIELD_CHOICES = (
("_OR", _("Or (mark an or between blocks)")),
)
field = forms.ChoiceField(required=True, widget=forms.Select(
attrs={'class': 'query-field'}), label=_('Field'))
operator = forms.ChoiceField(
label=_('Operator'),
required=True, choices=OPERATORS, initial="iexact",
widget=forms.Select(attrs={'class': 'query-operator'}))
value = VaryingTypeCharField(required=True, widget=forms.TextInput(
attrs={'class': 'query-value'}), label=_('Value'))
value_from = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-from'}), required=False)
value_to = forms.DateTimeField(widget=forms.HiddenInput(
attrs={'class': 'query-dt-to'}), required=False)
negate = forms.BooleanField(initial=False, required=False, label=_('Negate'))
def _build_field_choices(self, fields):
"""
Iterate over passed model fields tuple and update initial choices.
"""
return tuple(sorted(
[(fquery, capfirst(fname)) for fquery, fname in fields.items()],
key=lambda f: f[1].lower())
) + self.FIELD_CHOICES
def _build_query_dict(self, formdata=None):
"""
Take submitted data from form and create a query dict to be
used in a Q object (or filter)
"""
if self.is_valid() and formdata is None:
formdata = self.cleaned_data
key = "{field}__{operator}".format(**formdata)
if formdata['operator'] == "isnull":
return {key: None}
elif formdata['operator'] == "istrue":
return {formdata['field']: True}
elif formdata['operator'] == "isfalse":
return {formdata['field']: False}
return {key: formdata['value']}
@staticmethod
def _parse_query_dict(query_data, model):
"""
Take a list of query field dict and return data for form initialization
"""
operator = 'iexact'
if query_data['field'] == '_OR':
query_data['operator'] = operator
return query_data
parts = query_data['field'].split('__')
if len(parts) < 2:
field = parts[0]
else:
if parts[-1] in dict(AdvancedFilterQueryForm.OPERATORS).keys():
field = '__'.join(parts[:-1])
operator = parts[-1]
else:
field = query_data['field']
query_data['field'] = field
mfield = get_fields_from_path(model, query_data['field'])
if not mfield:
raise Exception('Field path "%s" could not be followed to a field'
' in model %s', query_data['field'], model)
else:
mfield = mfield[-1] # get the field object
if query_data['value'] is None:
query_data['operator'] = "isnull"
elif query_data['value'] is True:
query_data['operator'] = "istrue"
elif query_data['value'] is False:
query_data['operator'] = "isfalse"
else:
if isinstance(mfield, DateField):
# this is a date/datetime field
query_data['operator'] = "range" # default
else:
query_data['operator'] = operator # default
if isinstance(query_data.get('value'),
list) and query_data['operator'] == 'range':
date_from = date_to_string(query_data.get('value_from'))
date_to = date_to_string(query_data.get('value_to'))
query_data['value'] = ','.join([date_from, date_to])
return query_data
def set_range_value(self, data):
"""
Validates date range by parsing into 2 datetime objects and
validating them both.
"""
dtfrom = data.pop('value_from')
dtto = data.pop('value_to')
if dtfrom is dtto is None:
self.errors['value'] = ['Date range requires values']
raise forms.ValidationError([])
data['value'] = (dtfrom, dtto)
def clean(self):
cleaned_data = super(AdvancedFilterQueryForm, self).clean()
if cleaned_data.get('operator') == "range":
if ('value_from' in cleaned_data and
'value_to' in cleaned_data):
self.set_range_value(cleaned_data)
return cleaned_data
def __init__(self, model_fields={}, *args, **kwargs):
super(AdvancedFilterQueryForm, self).__init__(*args, **kwargs)
self.FIELD_CHOICES = self._build_field_choices(model_fields)
self.fields['field'].choices = self.FIELD_CHOICES
if not self.fields['field'].initial:
self.fields['field'].initial = self.FIELD_CHOICES[0]
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterForm.get_fields_from_model
|
python
|
def get_fields_from_model(self, model, fields):
model_fields = {}
for field in fields:
if isinstance(field, tuple) and len(field) == 2:
field, verbose_name = field[0], field[1]
else:
try:
model_field = get_fields_from_path(model, field)[-1]
verbose_name = model_field.verbose_name
except (FieldDoesNotExist, IndexError, TypeError) as e:
logger.warn("AdvancedFilterForm: skip invalid field "
"- %s", e)
continue
model_fields[field] = verbose_name
return model_fields
|
Iterate over given <field> names (in "orm query" notation) and find
the actual field given the initial <model>.
If <field> is a tuple of the format ('field_name', 'Verbose name'),
overwrite the field's verbose name with the given name for display
purposes.
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L245-L267
| null |
class AdvancedFilterForm(CleanWhiteSpacesMixin, forms.ModelForm):
""" Form to save/edit advanced filter forms """
class Meta:
model = AdvancedFilter
fields = ('title',)
class Media:
required_js = [
'admin/js/%sjquery.min.js' % ('vendor/jquery/' if USE_VENDOR_DIR else ''),
'advanced-filters/jquery_adder.js',
'orig_inlines%s.js' % ('' if settings.DEBUG else '.min'),
'magnific-popup/jquery.magnific-popup.js',
'advanced-filters/advanced-filters.js',
]
js = required_js + [SELECT2_JS]
css = {'screen': [
SELECT2_CSS,
'advanced-filters/advanced-filters.css',
'magnific-popup/magnific-popup.css'
]}
def __init__(self, *args, **kwargs):
model_admin = kwargs.pop('model_admin', None)
instance = kwargs.get('instance')
extra_form = kwargs.pop('extra_form', False)
# TODO: allow all fields to be determined by model
filter_fields = kwargs.pop('filter_fields', None)
if model_admin:
self._model = model_admin.model
elif instance and instance.model:
# get existing instance model
self._model = apps.get_model(*instance.model.split('.'))
try:
model_admin = admin.site._registry[self._model]
except KeyError:
logger.debug('No ModelAdmin registered for %s', self._model)
else:
raise Exception('Adding new AdvancedFilter from admin is '
'not supported')
self._filter_fields = filter_fields or getattr(
model_admin, 'advanced_filter_fields', ())
super(AdvancedFilterForm, self).__init__(*args, **kwargs)
# populate existing or empty forms formset
data = None
if len(args):
data = args[0]
elif kwargs.get('data'):
data = kwargs.get('data')
self.initialize_form(instance, self._model, data, extra_form)
def clean(self):
cleaned_data = super(AdvancedFilterForm, self).clean()
if not self.fields_formset.is_valid():
logger.debug(
"Errors validating advanced query filters: %s",
pformat([(f.errors, f.non_field_errors())
for f in self.fields_formset.forms]))
raise forms.ValidationError("Error validating filter forms")
cleaned_data['model'] = "%s.%s" % (self._model._meta.app_label,
self._model._meta.object_name)
return cleaned_data
@property
def _non_deleted_forms(self):
forms = []
for form in self.fields_formset.forms:
if form in self.fields_formset.deleted_forms:
continue # skip deleted forms when generating query
forms.append(form)
return forms
def generate_query(self):
""" Reduces multiple queries into a single usable query """
query = Q()
ORed = []
for form in self._non_deleted_forms:
if not hasattr(form, 'cleaned_data'):
continue
if form.cleaned_data['field'] == "_OR":
ORed.append(query)
query = Q()
else:
query = query & form.make_query()
if ORed:
if query: # add last query for OR if any
ORed.append(query)
query = reduce(operator.or_, ORed)
return query
def initialize_form(self, instance, model, data=None, extra=None):
""" Takes a "finalized" query and generate it's form data """
model_fields = self.get_fields_from_model(model, self._filter_fields)
forms = []
if instance:
for field_data in instance.list_fields():
forms.append(
AdvancedFilterQueryForm._parse_query_dict(
field_data, model))
formset = AFQFormSetNoExtra if not extra else AFQFormSet
self.fields_formset = formset(
data=data,
initial=forms or None,
model_fields=model_fields
)
def save(self, commit=True):
self.instance.query = self.generate_query()
self.instance.model = self.cleaned_data.get('model')
return super(AdvancedFilterForm, self).save(commit)
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterForm.generate_query
|
python
|
def generate_query(self):
query = Q()
ORed = []
for form in self._non_deleted_forms:
if not hasattr(form, 'cleaned_data'):
continue
if form.cleaned_data['field'] == "_OR":
ORed.append(query)
query = Q()
else:
query = query & form.make_query()
if ORed:
if query: # add last query for OR if any
ORed.append(query)
query = reduce(operator.or_, ORed)
return query
|
Reduces multiple queries into a single usable query
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L322-L338
| null |
class AdvancedFilterForm(CleanWhiteSpacesMixin, forms.ModelForm):
""" Form to save/edit advanced filter forms """
class Meta:
model = AdvancedFilter
fields = ('title',)
class Media:
required_js = [
'admin/js/%sjquery.min.js' % ('vendor/jquery/' if USE_VENDOR_DIR else ''),
'advanced-filters/jquery_adder.js',
'orig_inlines%s.js' % ('' if settings.DEBUG else '.min'),
'magnific-popup/jquery.magnific-popup.js',
'advanced-filters/advanced-filters.js',
]
js = required_js + [SELECT2_JS]
css = {'screen': [
SELECT2_CSS,
'advanced-filters/advanced-filters.css',
'magnific-popup/magnific-popup.css'
]}
def get_fields_from_model(self, model, fields):
"""
Iterate over given <field> names (in "orm query" notation) and find
the actual field given the initial <model>.
If <field> is a tuple of the format ('field_name', 'Verbose name'),
overwrite the field's verbose name with the given name for display
purposes.
"""
model_fields = {}
for field in fields:
if isinstance(field, tuple) and len(field) == 2:
field, verbose_name = field[0], field[1]
else:
try:
model_field = get_fields_from_path(model, field)[-1]
verbose_name = model_field.verbose_name
except (FieldDoesNotExist, IndexError, TypeError) as e:
logger.warn("AdvancedFilterForm: skip invalid field "
"- %s", e)
continue
model_fields[field] = verbose_name
return model_fields
def __init__(self, *args, **kwargs):
model_admin = kwargs.pop('model_admin', None)
instance = kwargs.get('instance')
extra_form = kwargs.pop('extra_form', False)
# TODO: allow all fields to be determined by model
filter_fields = kwargs.pop('filter_fields', None)
if model_admin:
self._model = model_admin.model
elif instance and instance.model:
# get existing instance model
self._model = apps.get_model(*instance.model.split('.'))
try:
model_admin = admin.site._registry[self._model]
except KeyError:
logger.debug('No ModelAdmin registered for %s', self._model)
else:
raise Exception('Adding new AdvancedFilter from admin is '
'not supported')
self._filter_fields = filter_fields or getattr(
model_admin, 'advanced_filter_fields', ())
super(AdvancedFilterForm, self).__init__(*args, **kwargs)
# populate existing or empty forms formset
data = None
if len(args):
data = args[0]
elif kwargs.get('data'):
data = kwargs.get('data')
self.initialize_form(instance, self._model, data, extra_form)
def clean(self):
cleaned_data = super(AdvancedFilterForm, self).clean()
if not self.fields_formset.is_valid():
logger.debug(
"Errors validating advanced query filters: %s",
pformat([(f.errors, f.non_field_errors())
for f in self.fields_formset.forms]))
raise forms.ValidationError("Error validating filter forms")
cleaned_data['model'] = "%s.%s" % (self._model._meta.app_label,
self._model._meta.object_name)
return cleaned_data
@property
def _non_deleted_forms(self):
forms = []
for form in self.fields_formset.forms:
if form in self.fields_formset.deleted_forms:
continue # skip deleted forms when generating query
forms.append(form)
return forms
def initialize_form(self, instance, model, data=None, extra=None):
""" Takes a "finalized" query and generate it's form data """
model_fields = self.get_fields_from_model(model, self._filter_fields)
forms = []
if instance:
for field_data in instance.list_fields():
forms.append(
AdvancedFilterQueryForm._parse_query_dict(
field_data, model))
formset = AFQFormSetNoExtra if not extra else AFQFormSet
self.fields_formset = formset(
data=data,
initial=forms or None,
model_fields=model_fields
)
def save(self, commit=True):
self.instance.query = self.generate_query()
self.instance.model = self.cleaned_data.get('model')
return super(AdvancedFilterForm, self).save(commit)
|
modlinltd/django-advanced-filters
|
advanced_filters/forms.py
|
AdvancedFilterForm.initialize_form
|
python
|
def initialize_form(self, instance, model, data=None, extra=None):
model_fields = self.get_fields_from_model(model, self._filter_fields)
forms = []
if instance:
for field_data in instance.list_fields():
forms.append(
AdvancedFilterQueryForm._parse_query_dict(
field_data, model))
formset = AFQFormSetNoExtra if not extra else AFQFormSet
self.fields_formset = formset(
data=data,
initial=forms or None,
model_fields=model_fields
)
|
Takes a "finalized" query and generate it's form data
|
train
|
https://github.com/modlinltd/django-advanced-filters/blob/ba51e6946d1652796a82b2b95cceffbe1190a227/advanced_filters/forms.py#L340-L356
|
[
"def _parse_query_dict(query_data, model):\n \"\"\"\n Take a list of query field dict and return data for form initialization\n \"\"\"\n operator = 'iexact'\n if query_data['field'] == '_OR':\n query_data['operator'] = operator\n return query_data\n\n parts = query_data['field'].split('__')\n if len(parts) < 2:\n field = parts[0]\n else:\n if parts[-1] in dict(AdvancedFilterQueryForm.OPERATORS).keys():\n field = '__'.join(parts[:-1])\n operator = parts[-1]\n else:\n field = query_data['field']\n\n query_data['field'] = field\n mfield = get_fields_from_path(model, query_data['field'])\n if not mfield:\n raise Exception('Field path \"%s\" could not be followed to a field'\n ' in model %s', query_data['field'], model)\n else:\n mfield = mfield[-1] # get the field object\n\n if query_data['value'] is None:\n query_data['operator'] = \"isnull\"\n elif query_data['value'] is True:\n query_data['operator'] = \"istrue\"\n elif query_data['value'] is False:\n query_data['operator'] = \"isfalse\"\n else:\n if isinstance(mfield, DateField):\n # this is a date/datetime field\n query_data['operator'] = \"range\" # default\n else:\n query_data['operator'] = operator # default\n\n if isinstance(query_data.get('value'),\n list) and query_data['operator'] == 'range':\n date_from = date_to_string(query_data.get('value_from'))\n date_to = date_to_string(query_data.get('value_to'))\n query_data['value'] = ','.join([date_from, date_to])\n\n return query_data\n",
"def get_fields_from_model(self, model, fields):\n \"\"\"\n Iterate over given <field> names (in \"orm query\" notation) and find\n the actual field given the initial <model>.\n\n If <field> is a tuple of the format ('field_name', 'Verbose name'),\n overwrite the field's verbose name with the given name for display\n purposes.\n \"\"\"\n model_fields = {}\n for field in fields:\n if isinstance(field, tuple) and len(field) == 2:\n field, verbose_name = field[0], field[1]\n else:\n try:\n model_field = get_fields_from_path(model, field)[-1]\n verbose_name = model_field.verbose_name\n except (FieldDoesNotExist, IndexError, TypeError) as e:\n logger.warn(\"AdvancedFilterForm: skip invalid field \"\n \"- %s\", e)\n continue\n model_fields[field] = verbose_name\n return model_fields\n"
] |
class AdvancedFilterForm(CleanWhiteSpacesMixin, forms.ModelForm):
""" Form to save/edit advanced filter forms """
class Meta:
model = AdvancedFilter
fields = ('title',)
class Media:
required_js = [
'admin/js/%sjquery.min.js' % ('vendor/jquery/' if USE_VENDOR_DIR else ''),
'advanced-filters/jquery_adder.js',
'orig_inlines%s.js' % ('' if settings.DEBUG else '.min'),
'magnific-popup/jquery.magnific-popup.js',
'advanced-filters/advanced-filters.js',
]
js = required_js + [SELECT2_JS]
css = {'screen': [
SELECT2_CSS,
'advanced-filters/advanced-filters.css',
'magnific-popup/magnific-popup.css'
]}
def get_fields_from_model(self, model, fields):
"""
Iterate over given <field> names (in "orm query" notation) and find
the actual field given the initial <model>.
If <field> is a tuple of the format ('field_name', 'Verbose name'),
overwrite the field's verbose name with the given name for display
purposes.
"""
model_fields = {}
for field in fields:
if isinstance(field, tuple) and len(field) == 2:
field, verbose_name = field[0], field[1]
else:
try:
model_field = get_fields_from_path(model, field)[-1]
verbose_name = model_field.verbose_name
except (FieldDoesNotExist, IndexError, TypeError) as e:
logger.warn("AdvancedFilterForm: skip invalid field "
"- %s", e)
continue
model_fields[field] = verbose_name
return model_fields
def __init__(self, *args, **kwargs):
model_admin = kwargs.pop('model_admin', None)
instance = kwargs.get('instance')
extra_form = kwargs.pop('extra_form', False)
# TODO: allow all fields to be determined by model
filter_fields = kwargs.pop('filter_fields', None)
if model_admin:
self._model = model_admin.model
elif instance and instance.model:
# get existing instance model
self._model = apps.get_model(*instance.model.split('.'))
try:
model_admin = admin.site._registry[self._model]
except KeyError:
logger.debug('No ModelAdmin registered for %s', self._model)
else:
raise Exception('Adding new AdvancedFilter from admin is '
'not supported')
self._filter_fields = filter_fields or getattr(
model_admin, 'advanced_filter_fields', ())
super(AdvancedFilterForm, self).__init__(*args, **kwargs)
# populate existing or empty forms formset
data = None
if len(args):
data = args[0]
elif kwargs.get('data'):
data = kwargs.get('data')
self.initialize_form(instance, self._model, data, extra_form)
def clean(self):
cleaned_data = super(AdvancedFilterForm, self).clean()
if not self.fields_formset.is_valid():
logger.debug(
"Errors validating advanced query filters: %s",
pformat([(f.errors, f.non_field_errors())
for f in self.fields_formset.forms]))
raise forms.ValidationError("Error validating filter forms")
cleaned_data['model'] = "%s.%s" % (self._model._meta.app_label,
self._model._meta.object_name)
return cleaned_data
@property
def _non_deleted_forms(self):
forms = []
for form in self.fields_formset.forms:
if form in self.fields_formset.deleted_forms:
continue # skip deleted forms when generating query
forms.append(form)
return forms
def generate_query(self):
""" Reduces multiple queries into a single usable query """
query = Q()
ORed = []
for form in self._non_deleted_forms:
if not hasattr(form, 'cleaned_data'):
continue
if form.cleaned_data['field'] == "_OR":
ORed.append(query)
query = Q()
else:
query = query & form.make_query()
if ORed:
if query: # add last query for OR if any
ORed.append(query)
query = reduce(operator.or_, ORed)
return query
def save(self, commit=True):
self.instance.query = self.generate_query()
self.instance.model = self.cleaned_data.get('model')
return super(AdvancedFilterForm, self).save(commit)
|
jrspruitt/ubi_reader
|
ubireader/ubi_io.py
|
ubi_file.read_block
|
python
|
def read_block(self, block):
self.seek(block.file_offset)
return self._fhandle.read(block.size)
|
Read complete PEB data from file.
Argument:
Obj:block -- Block data is desired for.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi_io.py#L149-L156
|
[
"def seek(self, offset):\n self._fhandle.seek(offset)\n"
] |
class ubi_file(object):
"""UBI image file object
Arguments:
Str:path -- Path to file to parse
Int:block_size -- Erase block size of NAND in bytes.
Int:start_offset -- (optional) Where to start looking in the file for
UBI data.
Int:end_offset -- (optional) Where to stop looking in the file.
Methods:
seek -- Put file head to specified byte offset.
Int:offset
read -- Read specified bytes from file handle.
Int:size
tell -- Returns byte offset of current file location.
read_block -- Returns complete PEB data of provided block
description.
Obj:block
read_block_data -- Returns LEB data only from provided block.
Obj:block
reader -- Generator that returns data from file.
reset -- Reset file position to start_offset.
    is_valid -- If the object initialized okay.
Handles all the actual file interactions, read, seek,
extract blocks, etc.
"""
def __init__(self, path, block_size, start_offset=0, end_offset=None):
self.__name__ = 'UBI_File'
self.is_valid = False
try:
log(self, 'Open Path: %s' % path)
self._fhandle = open(path, 'rb')
except Exception as e:
error(self, 'Fatal', 'Open file: %s' % e)
self._fhandle.seek(0,2)
file_size = self.tell()
log(self, 'File Size: %s' % file_size)
self._start_offset = start_offset
log(self, 'Start Offset: %s' % (self._start_offset))
if end_offset:
self._end_offset = end_offset
else:
self._end_offset = file_size
log(self, 'End Offset: %s' % (self._end_offset))
self._block_size = block_size
log(self, 'Block Size: %s' % block_size)
if start_offset > self._end_offset:
error(self, 'Fatal', 'Start offset larger than end offset.')
if ( not end_offset is None ) and ( end_offset > file_size ):
error(self, 'Fatal', 'End offset larger than file size.')
self._fhandle.seek(self._start_offset)
self._last_read_addr = self._fhandle.tell()
self.is_valid = True
def _set_start(self, i):
self._start_offset = i
def _get_start(self):
return self._start_offset
start_offset = property(_get_start, _set_start)
def _get_end(self):
return self._end_offset
end_offset = property(_get_end)
def _get_block_size(self):
return self._block_size
block_size = property(_get_block_size)
def seek(self, offset):
self._fhandle.seek(offset)
def read(self, size):
if self.end_offset < self.tell() + size:
error(self.read, 'Error', 'Block ends at %s which is greater than file size %s' % (self.tell() + size, self.end_offset))
raise Exception('Bad Read Offset Request')
self._last_read_addr = self.tell()
verbose_log(self, 'read loc: %s, size: %s' % (self._last_read_addr, size))
return self._fhandle.read(size)
def tell(self):
return self._fhandle.tell()
def last_read_addr(self):
return self._last_read_addr
def reset(self):
self._fhandle.seek(self.start_offset)
def reader(self):
self.reset()
while True:
cur_loc = self._fhandle.tell()
if self.end_offset and cur_loc > self.end_offset:
break
elif self.end_offset and self.end_offset - cur_loc < self.block_size:
chunk_size = self.end_offset - cur_loc
else:
chunk_size = self.block_size
buf = self.read(chunk_size)
if not buf:
break
yield buf
def read_block(self, block):
"""Read complete PEB data from file.
Argument:
Obj:block -- Block data is desired for.
"""
self.seek(block.file_offset)
return self._fhandle.read(block.size)
def read_block_data(self, block):
"""Read LEB data from file
Argument:
Obj:block -- Block data is desired for.
"""
self.seek(block.file_offset + block.ec_hdr.data_offset)
buf = self._fhandle.read(block.size - block.ec_hdr.data_offset - block.vid_hdr.data_pad)
return buf
|
jrspruitt/ubi_reader
|
ubireader/ubi_io.py
|
ubi_file.read_block_data
|
python
|
def read_block_data(self, block):
self.seek(block.file_offset + block.ec_hdr.data_offset)
buf = self._fhandle.read(block.size - block.ec_hdr.data_offset - block.vid_hdr.data_pad)
return buf
|
Read LEB data from file
Argument:
Obj:block -- Block data is desired for.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi_io.py#L159-L167
|
[
"def seek(self, offset):\n self._fhandle.seek(offset)\n"
] |
class ubi_file(object):
"""UBI image file object
Arguments:
Str:path -- Path to file to parse
Int:block_size -- Erase block size of NAND in bytes.
Int:start_offset -- (optional) Where to start looking in the file for
UBI data.
Int:end_offset -- (optional) Where to stop looking in the file.
Methods:
seek -- Put file head to specified byte offset.
Int:offset
read -- Read specified bytes from file handle.
Int:size
tell -- Returns byte offset of current file location.
read_block -- Returns complete PEB data of provided block
description.
Obj:block
read_block_data -- Returns LEB data only from provided block.
Obj:block
reader -- Generator that returns data from file.
reset -- Reset file position to start_offset.
is_valid -- If the object intialized okay.
Handles all the actual file interactions, read, seek,
extract blocks, etc.
"""
def __init__(self, path, block_size, start_offset=0, end_offset=None):
self.__name__ = 'UBI_File'
self.is_valid = False
try:
log(self, 'Open Path: %s' % path)
self._fhandle = open(path, 'rb')
except Exception as e:
error(self, 'Fatal', 'Open file: %s' % e)
self._fhandle.seek(0,2)
file_size = self.tell()
log(self, 'File Size: %s' % file_size)
self._start_offset = start_offset
log(self, 'Start Offset: %s' % (self._start_offset))
if end_offset:
self._end_offset = end_offset
else:
self._end_offset = file_size
log(self, 'End Offset: %s' % (self._end_offset))
self._block_size = block_size
log(self, 'Block Size: %s' % block_size)
if start_offset > self._end_offset:
error(self, 'Fatal', 'Start offset larger than end offset.')
if ( not end_offset is None ) and ( end_offset > file_size ):
error(self, 'Fatal', 'End offset larger than file size.')
self._fhandle.seek(self._start_offset)
self._last_read_addr = self._fhandle.tell()
self.is_valid = True
def _set_start(self, i):
self._start_offset = i
def _get_start(self):
return self._start_offset
start_offset = property(_get_start, _set_start)
def _get_end(self):
return self._end_offset
end_offset = property(_get_end)
def _get_block_size(self):
return self._block_size
block_size = property(_get_block_size)
def seek(self, offset):
self._fhandle.seek(offset)
def read(self, size):
if self.end_offset < self.tell() + size:
error(self.read, 'Error', 'Block ends at %s which is greater than file size %s' % (self.tell() + size, self.end_offset))
raise Exception('Bad Read Offset Request')
self._last_read_addr = self.tell()
verbose_log(self, 'read loc: %s, size: %s' % (self._last_read_addr, size))
return self._fhandle.read(size)
def tell(self):
return self._fhandle.tell()
def last_read_addr(self):
return self._last_read_addr
def reset(self):
self._fhandle.seek(self.start_offset)
def reader(self):
self.reset()
while True:
cur_loc = self._fhandle.tell()
if self.end_offset and cur_loc > self.end_offset:
break
elif self.end_offset and self.end_offset - cur_loc < self.block_size:
chunk_size = self.end_offset - cur_loc
else:
chunk_size = self.block_size
buf = self.read(chunk_size)
if not buf:
break
yield buf
def read_block(self, block):
"""Read complete PEB data from file.
Argument:
Obj:block -- Block data is desired for.
"""
self.seek(block.file_offset)
return self._fhandle.read(block.size)
def read_block_data(self, block):
"""Read LEB data from file
Argument:
Obj:block -- Block data is desired for.
"""
self.seek(block.file_offset + block.ec_hdr.data_offset)
buf = self._fhandle.read(block.size - block.ec_hdr.data_offset - block.vid_hdr.data_pad)
return buf
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/__init__.py
|
extract_blocks
|
python
|
def extract_blocks(ubi):
blocks = {}
ubi.file.seek(ubi.file.start_offset)
peb_count = 0
cur_offset = 0
bad_blocks = []
# range instead of xrange, as xrange breaks > 4GB end_offset.
for i in range(ubi.file.start_offset, ubi.file.end_offset, ubi.file.block_size):
try:
buf = ubi.file.read(ubi.file.block_size)
except Exception as e:
if settings.warn_only_block_read_errors:
error(extract_blocks, 'Error', 'PEB: %s: %s' % (ubi.first_peb_num + peb_count, str(e)))
continue
else:
error(extract_blocks, 'Fatal', 'PEB: %s: %s' % (ubi.first_peb_num + peb_count, str(e)))
if buf.startswith(UBI_EC_HDR_MAGIC):
blk = description(buf)
blk.file_offset = i
blk.peb_num = ubi.first_peb_num + peb_count
blk.size = ubi.file.block_size
blocks[blk.peb_num] = blk
peb_count += 1
log(extract_blocks, blk)
verbose_log(extract_blocks, 'file addr: %s' % (ubi.file.last_read_addr()))
ec_hdr_errors = ''
vid_hdr_errors = ''
if blk.ec_hdr.errors:
ec_hdr_errors = ','.join(blk.ec_hdr.errors)
if blk.vid_hdr and blk.vid_hdr.errors:
vid_hdr_errors = ','.join(blk.vid_hdr.errors)
if ec_hdr_errors or vid_hdr_errors:
if blk.peb_num not in bad_blocks:
bad_blocks.append(blk.peb_num)
log(extract_blocks, 'PEB: %s has possible issue EC_HDR [%s], VID_HDR [%s]' % (blk.peb_num, ec_hdr_errors, vid_hdr_errors))
verbose_display(blk)
else:
cur_offset += ubi.file.block_size
ubi.first_peb_num = cur_offset/ubi.file.block_size
ubi.file.start_offset = cur_offset
return blocks
|
Get a list of UBI block objects from file
Arguments:.
Obj:ubi -- UBI object.
Returns:
Dict -- Of block objects keyed by PEB number.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/__init__.py#L105-L162
|
[
"def error(obj, level, message):\n if settings.error_action is 'exit':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n\n else:\n if level.lower() == 'warn':\n print('{} {}: {}'.format(obj.__name__, level, message))\n elif level.lower() == 'fatal':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n else:\n print('{} {}: {}'.format(obj.__name__, level, message))\n",
"def log(obj, message):\n if settings.logging_on or settings.logging_on_verbose:\n print('{} {}'.format(obj.__name__, message))\n",
"def verbose_display(displayable_obj):\n if settings.logging_on_verbose:\n print(displayable_obj.display('\\t'))\n",
"def verbose_log(obj, message):\n if settings.logging_on_verbose:\n log(obj, message)\n"
] |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
import re
from ubireader import settings
from ubireader.debug import error, log, verbose_display, verbose_log
from ubireader.ubi import display
from ubireader.ubi.defines import UBI_EC_HDR_SZ, UBI_VID_HDR_SZ, UBI_INTERNAL_VOL_START, UBI_EC_HDR_MAGIC
from ubireader.ubi.headers import ec_hdr, vid_hdr, vtbl_recs
class description(object):
"""UBI Block description Object
UBI Specifications:
http://www.linux-mtd.infradead.org/ -- Home page
<kernel>/drivers/mtd/ubi/ubi-media.h -- Header structs
and defines
Attributes:
Obj:ec_hdr -- Error Count Header
Obj:vid_hdr -- Volume ID Header
List:vtbl_recs -- (Optional) List of Volume Table Records.
Bool:is_vtbl -- If contains volume records table.
Bool:is_internal_vol -- If Vol ID is > UBI_INTERNAL_VOL_START
Bool:is_valid -- If ec_hdr & vid_hdr are error free.
Int:peb_num -- Physical Erase Block number.
Int:leb_num -- Logical Erase Block number.
Int:file_offset -- Address location in file of this block.
Int:size -- Size of total block data or PEB size.
Will print out all information when invoked as a string.
"""
def __init__(self, block_buf):
self.file_offset = -1
self.peb_num = -1
self.leb_num = -1
self.size = -1
self.vid_hdr = None
self.is_internal_vol = False
self.vtbl_recs = []
# TODO better understanding of block types/errors
self.ec_hdr = ec_hdr(block_buf[0:UBI_EC_HDR_SZ])
if not self.ec_hdr.errors or settings.ignore_block_header_errors:
self.vid_hdr = vid_hdr(block_buf[self.ec_hdr.vid_hdr_offset:self.ec_hdr.vid_hdr_offset+UBI_VID_HDR_SZ])
if not self.vid_hdr.errors or settings.ignore_block_header_errors:
self.is_internal_vol = self.vid_hdr.vol_id >= UBI_INTERNAL_VOL_START
if self.vid_hdr.vol_id >= UBI_INTERNAL_VOL_START:
self.vtbl_recs = vtbl_recs(block_buf[self.ec_hdr.data_offset:])
self.leb_num = self.vid_hdr.lnum
self.is_vtbl = bool(self.vtbl_recs) or False
self.is_valid = not self.ec_hdr.errors and not self.vid_hdr.errors or settings.ignore_block_header_errors
def __repr__(self):
return 'Block: PEB# %s: LEB# %s' % (self.peb_num, self.leb_num)
def display(self, tab=''):
return display.block(self, tab)
def get_blocks_in_list(blocks, idx_list):
"""Retrieve block objects in list of indexes
Arguments:
List:blocks -- List of block objects
List:idx_list -- List of block indexes
Returns:
Dict:blocks -- List of block objects generated
from provided list of indexes in
order of idx_list.
"""
return {i:blocks[i] for i in idx_list}
def extract_blocks(ubi):
"""Get a list of UBI block objects from file
Arguments:.
Obj:ubi -- UBI object.
Returns:
Dict -- Of block objects keyed by PEB number.
"""
blocks = {}
ubi.file.seek(ubi.file.start_offset)
peb_count = 0
cur_offset = 0
bad_blocks = []
# range instead of xrange, as xrange breaks > 4GB end_offset.
for i in range(ubi.file.start_offset, ubi.file.end_offset, ubi.file.block_size):
try:
buf = ubi.file.read(ubi.file.block_size)
except Exception as e:
if settings.warn_only_block_read_errors:
error(extract_blocks, 'Error', 'PEB: %s: %s' % (ubi.first_peb_num + peb_count, str(e)))
continue
else:
error(extract_blocks, 'Fatal', 'PEB: %s: %s' % (ubi.first_peb_num + peb_count, str(e)))
if buf.startswith(UBI_EC_HDR_MAGIC):
blk = description(buf)
blk.file_offset = i
blk.peb_num = ubi.first_peb_num + peb_count
blk.size = ubi.file.block_size
blocks[blk.peb_num] = blk
peb_count += 1
log(extract_blocks, blk)
verbose_log(extract_blocks, 'file addr: %s' % (ubi.file.last_read_addr()))
ec_hdr_errors = ''
vid_hdr_errors = ''
if blk.ec_hdr.errors:
ec_hdr_errors = ','.join(blk.ec_hdr.errors)
if blk.vid_hdr and blk.vid_hdr.errors:
vid_hdr_errors = ','.join(blk.vid_hdr.errors)
if ec_hdr_errors or vid_hdr_errors:
if blk.peb_num not in bad_blocks:
bad_blocks.append(blk.peb_num)
log(extract_blocks, 'PEB: %s has possible issue EC_HDR [%s], VID_HDR [%s]' % (blk.peb_num, ec_hdr_errors, vid_hdr_errors))
verbose_display(blk)
else:
cur_offset += ubi.file.block_size
ubi.first_peb_num = cur_offset/ubi.file.block_size
ubi.file.start_offset = cur_offset
return blocks
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/sort.py
|
by_image_seq
|
python
|
def by_image_seq(blocks, image_seq):
return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks))
|
Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/sort.py#L20-L30
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
def by_image_seq(blocks, image_seq):
"""Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
"""
return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks))
def by_leb(blocks):
"""Sort blocks by Logical Erase Block number.
Arguments:
List:blocks -- List of block objects to sort.
Returns:
List -- Indexes of blocks sorted by LEB.
"""
slist_len = len(blocks)
slist = ['x'] * slist_len
for block in blocks:
if blocks[block].leb_num >= slist_len:
add_elements = blocks[block].leb_num - slist_len + 1
slist += (['x'] * add_elements)
slist_len = len(slist)
slist[blocks[block].leb_num] = block
return slist
def by_vol_id(blocks, slist=None):
"""Sort blocks by volume id
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Return:
Dict -- blocks grouped in lists with dict key as volume id.
"""
vol_blocks = {}
# sort block by volume
# not reliable with multiple partitions (fifo)
for i in blocks:
if slist and i not in slist:
continue
elif not blocks[i].is_valid:
continue
if blocks[i].vid_hdr.vol_id not in vol_blocks:
vol_blocks[blocks[i].vid_hdr.vol_id] = []
vol_blocks[blocks[i].vid_hdr.vol_id].append(blocks[i].peb_num)
return vol_blocks
def by_type(blocks, slist=None):
"""Sort blocks into layout, internal volume, data or unknown
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Returns:
List:layout -- List of block indexes of blocks containing the
volume table records.
List:data -- List of block indexes containing filesystem data.
List:int_vol -- List of block indexes containing volume ids
greater than UBI_INTERNAL_VOL_START that are not
layout volumes.
List:unknown -- List of block indexes of blocks that failed validation
of crc in ed_hdr or vid_hdr.
"""
layout = []
data = []
int_vol = []
unknown = []
for i in blocks:
if slist and i not in slist:
continue
if blocks[i].is_vtbl and blocks[i].is_valid:
layout.append(i)
elif blocks[i].is_internal_vol and blocks[i].is_valid:
int_vol.append(i)
elif blocks[i].is_valid:
data.append(i)
else:
unknown.append(i)
return layout, data, int_vol, unknown
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/sort.py
|
by_leb
|
python
|
def by_leb(blocks):
"""Sort blocks by Logical Erase Block number.
Arguments:
List:blocks -- List of block objects to sort.
Returns:
List -- Indexes of blocks sorted by LEB.
"""
slist_len = len(blocks)
slist = ['x'] * slist_len
for block in blocks:
if blocks[block].leb_num >= slist_len:
add_elements = blocks[block].leb_num - slist_len + 1
slist += (['x'] * add_elements)
slist_len = len(slist)
slist[blocks[block].leb_num] = block
return slist
|
Sort blocks by Logical Erase Block number.
Arguments:
List:blocks -- List of block objects to sort.
Returns:
List -- Indexes of blocks sorted by LEB.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/sort.py#L32-L52
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
def by_image_seq(blocks, image_seq):
"""Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
"""
return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks))
def by_leb(blocks):
"""Sort blocks by Logical Erase Block number.
Arguments:
List:blocks -- List of block objects to sort.
Returns:
List -- Indexes of blocks sorted by LEB.
"""
slist_len = len(blocks)
slist = ['x'] * slist_len
for block in blocks:
if blocks[block].leb_num >= slist_len:
add_elements = blocks[block].leb_num - slist_len + 1
slist += (['x'] * add_elements)
slist_len = len(slist)
slist[blocks[block].leb_num] = block
return slist
def by_vol_id(blocks, slist=None):
"""Sort blocks by volume id
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Return:
Dict -- blocks grouped in lists with dict key as volume id.
"""
vol_blocks = {}
# sort block by volume
# not reliable with multiple partitions (fifo)
for i in blocks:
if slist and i not in slist:
continue
elif not blocks[i].is_valid:
continue
if blocks[i].vid_hdr.vol_id not in vol_blocks:
vol_blocks[blocks[i].vid_hdr.vol_id] = []
vol_blocks[blocks[i].vid_hdr.vol_id].append(blocks[i].peb_num)
return vol_blocks
def by_type(blocks, slist=None):
"""Sort blocks into layout, internal volume, data or unknown
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Returns:
List:layout -- List of block indexes of blocks containing the
volume table records.
List:data -- List of block indexes containing filesystem data.
List:int_vol -- List of block indexes containing volume ids
greater than UBI_INTERNAL_VOL_START that are not
layout volumes.
List:unknown -- List of block indexes of blocks that failed validation
of crc in ed_hdr or vid_hdr.
"""
layout = []
data = []
int_vol = []
unknown = []
for i in blocks:
if slist and i not in slist:
continue
if blocks[i].is_vtbl and blocks[i].is_valid:
layout.append(i)
elif blocks[i].is_internal_vol and blocks[i].is_valid:
int_vol.append(i)
elif blocks[i].is_valid:
data.append(i)
else:
unknown.append(i)
return layout, data, int_vol, unknown
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/sort.py
|
by_vol_id
|
python
|
def by_vol_id(blocks, slist=None):
vol_blocks = {}
# sort block by volume
# not reliable with multiple partitions (fifo)
for i in blocks:
if slist and i not in slist:
continue
elif not blocks[i].is_valid:
continue
if blocks[i].vid_hdr.vol_id not in vol_blocks:
vol_blocks[blocks[i].vid_hdr.vol_id] = []
vol_blocks[blocks[i].vid_hdr.vol_id].append(blocks[i].peb_num)
return vol_blocks
|
Sort blocks by volume id
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Return:
Dict -- blocks grouped in lists with dict key as volume id.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/sort.py#L55-L82
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
def by_image_seq(blocks, image_seq):
"""Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
"""
return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks))
def by_leb(blocks):
"""Sort blocks by Logical Erase Block number.
Arguments:
List:blocks -- List of block objects to sort.
Returns:
List -- Indexes of blocks sorted by LEB.
"""
slist_len = len(blocks)
slist = ['x'] * slist_len
for block in blocks:
if blocks[block].leb_num >= slist_len:
add_elements = blocks[block].leb_num - slist_len + 1
slist += (['x'] * add_elements)
slist_len = len(slist)
slist[blocks[block].leb_num] = block
return slist
def by_type(blocks, slist=None):
"""Sort blocks into layout, internal volume, data or unknown
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Returns:
List:layout -- List of block indexes of blocks containing the
volume table records.
List:data -- List of block indexes containing filesystem data.
List:int_vol -- List of block indexes containing volume ids
greater than UBI_INTERNAL_VOL_START that are not
layout volumes.
List:unknown -- List of block indexes of blocks that failed validation
of crc in ed_hdr or vid_hdr.
"""
layout = []
data = []
int_vol = []
unknown = []
for i in blocks:
if slist and i not in slist:
continue
if blocks[i].is_vtbl and blocks[i].is_valid:
layout.append(i)
elif blocks[i].is_internal_vol and blocks[i].is_valid:
int_vol.append(i)
elif blocks[i].is_valid:
data.append(i)
else:
unknown.append(i)
return layout, data, int_vol, unknown
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/sort.py
|
by_type
|
python
|
def by_type(blocks, slist=None):
layout = []
data = []
int_vol = []
unknown = []
for i in blocks:
if slist and i not in slist:
continue
if blocks[i].is_vtbl and blocks[i].is_valid:
layout.append(i)
elif blocks[i].is_internal_vol and blocks[i].is_valid:
int_vol.append(i)
elif blocks[i].is_valid:
data.append(i)
else:
unknown.append(i)
return layout, data, int_vol, unknown
|
Sort blocks into layout, internal volume, data or unknown
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Returns:
List:layout -- List of block indexes of blocks containing the
volume table records.
List:data -- List of block indexes containing filesystem data.
List:int_vol -- List of block indexes containing volume ids
greater than UBI_INTERNAL_VOL_START that are not
layout volumes.
List:unknown -- List of block indexes of blocks that failed validation
of crc in ed_hdr or vid_hdr.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/sort.py#L84-L123
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
def by_image_seq(blocks, image_seq):
"""Filter blocks to return only those associated with the provided image_seq number.
Argument:
List:blocks -- List of block objects to sort.
Int:image_seq -- image_seq number found in ec_hdr.
Returns:
List -- List of block indexes matching image_seq number.
"""
return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks))
def by_leb(blocks):
"""Sort blocks by Logical Erase Block number.
Arguments:
List:blocks -- List of block objects to sort.
Returns:
List -- Indexes of blocks sorted by LEB.
"""
slist_len = len(blocks)
slist = ['x'] * slist_len
for block in blocks:
if blocks[block].leb_num >= slist_len:
add_elements = blocks[block].leb_num - slist_len + 1
slist += (['x'] * add_elements)
slist_len = len(slist)
slist[blocks[block].leb_num] = block
return slist
def by_vol_id(blocks, slist=None):
"""Sort blocks by volume id
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Return:
Dict -- blocks grouped in lists with dict key as volume id.
"""
vol_blocks = {}
# sort block by volume
# not reliable with multiple partitions (fifo)
for i in blocks:
if slist and i not in slist:
continue
elif not blocks[i].is_valid:
continue
if blocks[i].vid_hdr.vol_id not in vol_blocks:
vol_blocks[blocks[i].vid_hdr.vol_id] = []
vol_blocks[blocks[i].vid_hdr.vol_id].append(blocks[i].peb_num)
return vol_blocks
def by_type(blocks, slist=None):
"""Sort blocks into layout, internal volume, data or unknown
Arguments:
Obj:blocks -- List of block objects.
List:slist -- (optional) List of block indexes.
Returns:
List:layout -- List of block indexes of blocks containing the
volume table records.
List:data -- List of block indexes containing filesystem data.
List:int_vol -- List of block indexes containing volume ids
greater than UBI_INTERNAL_VOL_START that are not
layout volumes.
List:unknown -- List of block indexes of blocks that failed validation
of crc in ed_hdr or vid_hdr.
"""
layout = []
data = []
int_vol = []
unknown = []
for i in blocks:
if slist and i not in slist:
continue
if blocks[i].is_vtbl and blocks[i].is_valid:
layout.append(i)
elif blocks[i].is_internal_vol and blocks[i].is_valid:
int_vol.append(i)
elif blocks[i].is_valid:
data.append(i)
else:
unknown.append(i)
return layout, data, int_vol, unknown
|
jrspruitt/ubi_reader
|
ubireader/ubifs/output.py
|
extract_files
|
python
|
def extract_files(ubifs, out_path, perms=False):
try:
inodes = {}
bad_blocks = []
walk.index(ubifs, ubifs.master_node.root_lnum, ubifs.master_node.root_offs, inodes, bad_blocks)
if len(inodes) < 2:
raise Exception('No inodes found')
for dent in inodes[1]['dent']:
extract_dents(ubifs, inodes, dent, out_path, perms)
if len(bad_blocks):
error(extract_files, 'Warning', 'Data may be missing or corrupted, bad blocks, LEB [%s]' % ','.join(map(str, bad_blocks)))
except Exception as e:
error(extract_files, 'Error', '%s' % e)
|
Extract UBIFS contents to_path/
Arguments:
Obj:ubifs -- UBIFS object.
Str:out_path -- Path to extract contents to.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubifs/output.py#L30-L53
|
[
"def index(ubifs, lnum, offset, inodes={}, bad_blocks=[]):\n \"\"\"Walk the index gathering Inode, Dir Entry, and File nodes.\n\n Arguments:\n Obj:ubifs -- UBIFS object.\n Int:lnum -- Logical erase block number.\n Int:offset -- Offset in logical erase block.\n Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.\n\n Returns:\n Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.\n 'ino' -- Inode node.\n 'data' -- List of data nodes if present.\n 'dent' -- List of directory entry nodes if present.\n \"\"\"\n try:\n if len(bad_blocks):\n if lnum in bad_blocks:\n return\n\n ubifs.file.seek((ubifs.leb_size * lnum) + offset)\n buf = ubifs.file.read(UBIFS_COMMON_HDR_SZ)\n chdr = nodes.common_hdr(buf)\n log(index , '%s file addr: %s' % (chdr, ubifs.file.last_read_addr()))\n verbose_display(chdr)\n node_buf = ubifs.file.read(chdr.len - UBIFS_COMMON_HDR_SZ)\n file_offset = ubifs.file.last_read_addr()\n\n except Exception as e:\n if str(e) == 'Bad Read Offset Request' and settings.warn_only_block_read_errors:\n bad_blocks.append(lnum)\n return\n\n else:\n error(index, 'Fatal', 'LEB: %s, UBIFS offset: %s, error: %s' % (lnum, ((ubifs.leb_size * lnum) + offset), e))\n\n if chdr.node_type == UBIFS_IDX_NODE:\n try:\n idxn = nodes.idx_node(node_buf)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))\n\n log(index, '%s file addr: %s' % (idxn, file_offset))\n verbose_display(idxn)\n branch_idx = 0\n\n for branch in idxn.branches:\n verbose_log(index, '-------------------')\n log(index, '%s file addr: %s' % (branch, file_offset + UBIFS_IDX_NODE_SZ + (branch_idx * UBIFS_BRANCH_SZ)))\n verbose_display(branch)\n index(ubifs, branch.lnum, branch.offs, inodes, bad_blocks)\n branch_idx += 1\n\n elif chdr.node_type == UBIFS_INO_NODE:\n 
try:\n inon = nodes.ino_node(node_buf)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))\n\n ino_num = inon.key['ino_num']\n log(index, '%s file addr: %s, ino num: %s' % (inon, file_offset, ino_num))\n verbose_display(inon)\n\n if not ino_num in inodes:\n inodes[ino_num] = {}\n\n inodes[ino_num]['ino'] = inon\n\n elif chdr.node_type == UBIFS_DATA_NODE:\n try:\n datn = nodes.data_node(node_buf, (ubifs.leb_size * lnum) + UBIFS_COMMON_HDR_SZ + offset + UBIFS_DATA_NODE_SZ)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))\n\n ino_num = datn.key['ino_num']\n log(index, '%s file addr: %s, ino num: %s' % (datn, file_offset, ino_num))\n verbose_display(datn)\n\n if not ino_num in inodes:\n inodes[ino_num] = {}\n\n if not 'data' in inodes[ino_num]:\n inodes[ino_num]['data']= []\n\n inodes[ino_num]['data'].append(datn)\n\n elif chdr.node_type == UBIFS_DENT_NODE:\n try:\n dn = nodes.dent_node(node_buf)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))\n\n ino_num = dn.key['ino_num']\n log(index, '%s file addr: %s, ino num: %s' % (dn, file_offset, ino_num))\n verbose_display(dn)\n\n if not ino_num in inodes:\n inodes[ino_num] = {}\n\n if not 'dent' in inodes[ino_num]:\n inodes[ino_num]['dent']= []\n\n inodes[ino_num]['dent'].append(dn)\n",
"def error(obj, level, message):\n if settings.error_action is 'exit':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n\n else:\n if level.lower() == 'warn':\n print('{} {}: {}'.format(obj.__name__, level, message))\n elif level.lower() == 'fatal':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n else:\n print('{} {}: {}'.format(obj.__name__, level, message))\n",
"def extract_dents(ubifs, inodes, dent_node, path='', perms=False):\n if dent_node.inum not in inodes:\n error(extract_dents, 'Error', 'inum: %s not found in inodes' % (dent_node.inum))\n return\n\n inode = inodes[dent_node.inum]\n dent_path = os.path.join(path, dent_node.name)\n\n if dent_node.type == UBIFS_ITYPE_DIR:\n try:\n if not os.path.exists(dent_path):\n os.mkdir(dent_path)\n log(extract_dents, 'Make Dir: %s' % (dent_path))\n\n if perms:\n _set_file_perms(dent_path, inode)\n except Exception as e:\n error(extract_dents, 'Warn', 'DIR Fail: %s' % e)\n\n if 'dent' in inode:\n for dnode in inode['dent']:\n extract_dents(ubifs, inodes, dnode, dent_path, perms)\n\n _set_file_timestamps(dent_path, inode)\n\n elif dent_node.type == UBIFS_ITYPE_REG:\n try:\n if inode['ino'].nlink > 1:\n if 'hlink' not in inode:\n inode['hlink'] = dent_path\n buf = _process_reg_file(ubifs, inode, dent_path)\n _write_reg_file(dent_path, buf)\n else:\n os.link(inode['hlink'], dent_path)\n log(extract_dents, 'Make Link: %s > %s' % (dent_path, inode['hlink']))\n else:\n buf = _process_reg_file(ubifs, inode, dent_path)\n _write_reg_file(dent_path, buf)\n\n _set_file_timestamps(dent_path, inode)\n\n if perms:\n _set_file_perms(dent_path, inode)\n\n except Exception as e:\n error(extract_dents, 'Warn', 'FILE Fail: %s' % e)\n\n elif dent_node.type == UBIFS_ITYPE_LNK:\n try:\n # probably will need to decompress ino data if > UBIFS_MIN_COMPR_LEN\n os.symlink('%s' % inode['ino'].data.decode('utf-8'), dent_path)\n log(extract_dents, 'Make Symlink: %s > %s' % (dent_path, inode['ino'].data))\n\n except Exception as e:\n error(extract_dents, 'Warn', 'SYMLINK Fail: %s' % e) \n\n elif dent_node.type in [UBIFS_ITYPE_BLK, UBIFS_ITYPE_CHR]:\n try:\n dev = struct.unpack('<II', inode['ino'].data)[0]\n if True:\n os.mknod(dent_path, inode['ino'].mode, dev)\n log(extract_dents, 'Make Device Node: %s' % (dent_path))\n\n if perms:\n _set_file_perms(path, inode)\n else:\n log(extract_dents, 'Create dummy 
node.')\n _write_reg_file(dent_path, str(dev))\n\n if perms:\n _set_file_perms(dent_path, inode)\n\n except Exception as e:\n error(extract_dents, 'Warn', 'DEV Fail: %s' % e)\n\n elif dent_node.type == UBIFS_ITYPE_FIFO:\n try:\n os.mkfifo(dent_path, inode['ino'].mode)\n log(extract_dents, 'Make FIFO: %s' % (path))\n\n if perms:\n _set_file_perms(dent_path, inode)\n except Exception as e:\n error(extract_dents, 'Warn', 'FIFO Fail: %s : %s' % (dent_path, e))\n\n elif dent_node.type == UBIFS_ITYPE_SOCK:\n try:\n if settings.use_dummy_socket_file:\n _write_reg_file(dent_path, '')\n if perms:\n _set_file_perms(dent_path, inode)\n except Exception as e:\n error(extract_dents, 'Warn', 'SOCK Fail: %s : %s' % (dent_path, e))\n"
] |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubifs
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
import os
import struct
from ubireader import settings
from ubireader.ubifs.defines import *
from ubireader.ubifs import walk
from ubireader.ubifs.misc import decompress
from ubireader.debug import error, log, verbose_log
def extract_dents(ubifs, inodes, dent_node, path='', perms=False):
    """Recursively recreate on disk the object named by a directory entry node.

    Arguments:
    Obj:ubifs      -- UBIFS object (used to read file data from the image).
    Dict:inodes    -- Dict of ino/dent/data nodes keyed by inode number.
    Obj:dent_node  -- Directory entry node to extract.
    Str:path       -- Destination directory the entry is created under.
    Bool:perms     -- If True, also apply the inode's mode/owner to the result.

    Failures for individual entries are reported as warnings via error()
    and extraction continues.
    """
    # An entry whose inode was never collected from the index cannot be extracted.
    if dent_node.inum not in inodes:
        error(extract_dents, 'Error', 'inum: %s not found in inodes' % (dent_node.inum))
        return
    inode = inodes[dent_node.inum]
    dent_path = os.path.join(path, dent_node.name)
    if dent_node.type == UBIFS_ITYPE_DIR:
        try:
            if not os.path.exists(dent_path):
                os.mkdir(dent_path)
                log(extract_dents, 'Make Dir: %s' % (dent_path))
                if perms:
                    _set_file_perms(dent_path, inode)
        except Exception as e:
            error(extract_dents, 'Warn', 'DIR Fail: %s' % e)
        # Recurse into the directory's own entries before stamping its
        # timestamps, so creating children does not clobber them.
        if 'dent' in inode:
            for dnode in inode['dent']:
                extract_dents(ubifs, inodes, dnode, dent_path, perms)
        _set_file_timestamps(dent_path, inode)
    elif dent_node.type == UBIFS_ITYPE_REG:
        try:
            if inode['ino'].nlink > 1:
                # First sighting of a multiply-linked inode writes the data;
                # later sightings hard-link back to that first path.
                if 'hlink' not in inode:
                    inode['hlink'] = dent_path
                    buf = _process_reg_file(ubifs, inode, dent_path)
                    _write_reg_file(dent_path, buf)
                else:
                    os.link(inode['hlink'], dent_path)
                    log(extract_dents, 'Make Link: %s > %s' % (dent_path, inode['hlink']))
            else:
                buf = _process_reg_file(ubifs, inode, dent_path)
                _write_reg_file(dent_path, buf)
            _set_file_timestamps(dent_path, inode)
            if perms:
                _set_file_perms(dent_path, inode)
        except Exception as e:
            error(extract_dents, 'Warn', 'FILE Fail: %s' % e)
    elif dent_node.type == UBIFS_ITYPE_LNK:
        try:
            # probably will need to decompress ino data if > UBIFS_MIN_COMPR_LEN
            os.symlink('%s' % inode['ino'].data.decode('utf-8'), dent_path)
            log(extract_dents, 'Make Symlink: %s > %s' % (dent_path, inode['ino'].data))
        except Exception as e:
            error(extract_dents, 'Warn', 'SYMLINK Fail: %s' % e)
    elif dent_node.type in [UBIFS_ITYPE_BLK, UBIFS_ITYPE_CHR]:
        try:
            # Device numbers are packed into the inode's data area.
            dev = struct.unpack('<II', inode['ino'].data)[0]
            # NOTE(review): 'if True' makes the dummy-node else-branch
            # unreachable -- presumably this was once a settings flag; confirm.
            if True:
                os.mknod(dent_path, inode['ino'].mode, dev)
                log(extract_dents, 'Make Device Node: %s' % (dent_path))
                if perms:
                    # NOTE(review): passes 'path' (the parent dir), not
                    # 'dent_path' -- looks like a bug; confirm intent.
                    _set_file_perms(path, inode)
            else:
                log(extract_dents, 'Create dummy node.')
                _write_reg_file(dent_path, str(dev))
                if perms:
                    _set_file_perms(dent_path, inode)
        except Exception as e:
            error(extract_dents, 'Warn', 'DEV Fail: %s' % e)
    elif dent_node.type == UBIFS_ITYPE_FIFO:
        try:
            os.mkfifo(dent_path, inode['ino'].mode)
            # NOTE(review): logs 'path' (the parent dir) rather than dent_path.
            log(extract_dents, 'Make FIFO: %s' % (path))
            if perms:
                _set_file_perms(dent_path, inode)
        except Exception as e:
            error(extract_dents, 'Warn', 'FIFO Fail: %s : %s' % (dent_path, e))
    elif dent_node.type == UBIFS_ITYPE_SOCK:
        try:
            # Sockets cannot be recreated; optionally leave a placeholder file.
            if settings.use_dummy_socket_file:
                _write_reg_file(dent_path, '')
                if perms:
                    _set_file_perms(dent_path, inode)
        except Exception as e:
            error(extract_dents, 'Warn', 'SOCK Fail: %s : %s' % (dent_path, e))
def _set_file_perms(path, inode):
    """Apply the inode's mode and ownership to the file at *path*."""
    ino = inode['ino']
    os.chmod(path, ino.mode)
    os.chown(path, ino.uid, ino.gid)
    verbose_log(_set_file_perms, 'perms:%s, owner: %s.%s, path: %s' % (ino.mode, ino.uid, ino.gid, path))
def _set_file_timestamps(path, inode):
    """Copy the inode's access/modify times onto the file at *path*."""
    ino = inode['ino']
    os.utime(path, (ino.atime_sec, ino.mtime_sec))
    verbose_log(_set_file_timestamps, 'timestamps: access: %s, modify: %s, path: %s' % (ino.atime_sec, ino.mtime_sec, path))
def _write_reg_file(path, data):
    """Create (or overwrite) the file at *path* with the given bytes."""
    with open(path, 'wb') as out:
        out.write(data)
    log(_write_reg_file, 'Make File: %s' % (path))
def _process_reg_file(ubifs, inode, path):
    """Assemble a regular file's contents from its collected data nodes.

    Data nodes are ordered by key hash (block index); gaps in the sequence
    are sparse regions and are filled with zero blocks.  The result is
    zero-padded up to the inode's recorded size.  Any error while reading
    or decompressing is reported as a warning and assembly stops early.
    """
    try:
        buf = b''
        if 'data' in inode:
            compr_type = 0
            sorted_data = sorted(inode['data'], key=lambda x: x.key['khash'])
            # Start one before the first block so the gap check below works
            # for the very first node too.
            last_khash = sorted_data[0].key['khash']-1
            for data in sorted_data:
                # If data nodes are missing in sequence, fill in blanks
                # with \x00 * UBIFS_BLOCK_SIZE
                if data.key['khash'] - last_khash != 1:
                    while 1 != (data.key['khash'] - last_khash):
                        buf += b'\x00'*UBIFS_BLOCK_SIZE
                        last_khash += 1
                compr_type = data.compr_type
                # Read the node's (possibly compressed) payload from the image.
                ubifs.file.seek(data.offset)
                d = ubifs.file.read(data.compr_len)
                buf += decompress(compr_type, data.size, d)
                last_khash = data.key['khash']
        # NOTE(review): when the inode has no data nodes, compr_type is
        # undefined here, so this raises NameError which the except below
        # swallows -- confirm whether that is intended.
        verbose_log(_process_reg_file, 'ino num: %s, compression: %s, path: %s' % (inode['ino'].key['ino_num'], compr_type, path))
    except Exception as e:
        error(_process_reg_file, 'Warn', 'inode num:%s :%s' % (inode['ino'].key['ino_num'], e))
    # Pad end of file with \x00 if needed.
    if inode['ino'].size > len(buf):
        buf += b'\x00' * (inode['ino'].size - len(buf))
    return buf
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/layout.py
|
get_newest
|
python
|
def get_newest(blocks, layout_blocks):
    """Drop superseded layout blocks, keeping the newest copy of each.

    Two layout blocks compete when they share an image_seq and a LEB
    number; of such a pair the one with the lower sqnum is removed from
    *layout_blocks* (modified in place and returned).
    """
    candidates = list(layout_blocks)
    for cand in candidates:
        for pos in range(len(layout_blocks)):
            rival = layout_blocks[pos]
            if blocks[cand].ec_hdr.image_seq != blocks[rival].ec_hdr.image_seq:
                continue
            if blocks[cand].leb_num != blocks[rival].leb_num:
                continue
            if blocks[cand].vid_hdr.sqnum > blocks[rival].vid_hdr.sqnum:
                del layout_blocks[pos]
                break
    return layout_blocks
|
Filter out old layout blocks from list
Arguments:
List:blocks -- List of block objects
List:layout_blocks -- List of layout block indexes
Returns:
List -- Newest layout blocks in list
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/layout.py#L23-L47
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
from ubireader.debug import error, log
from ubireader.ubi.block import sort
def group_pairs(blocks, layout_blocks_list):
    """Group layout block indexes by their image_seq number.

    Arguments:
    List:blocks             -- List of block objects
    List:layout_blocks_list -- List of layout block indexes

    Returns:
    List -- Lists of layout block indexes, one list per image_seq.
    """
    by_image_seq = {}
    for peb in layout_blocks_list:
        by_image_seq.setdefault(blocks[peb].ec_hdr.image_seq, []).append(peb)
    log(group_pairs, 'Layout blocks found at PEBs: %s' % list(by_image_seq.values()))
    return list(by_image_seq.values())
def associate_blocks(blocks, layout_pairs, start_peb_num):
    """Attach to each layout pair the block indexes sharing its image_seq.

    Arguments:
    List:blocks -- List of block objects
    List:layout_pairs -- List of grouped layout blocks
    Int:start_peb_num -- Number of the PEB to start from (kept for
                         interface compatibility; not used here).

    Returns:
    List -- Layout block pairs, each extended with its block index list.
    """
    for pair in layout_pairs:
        image_seq = blocks[pair[0]].ec_hdr.image_seq
        pair.append(sort.by_image_seq(blocks, image_seq))
    return layout_pairs
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/layout.py
|
group_pairs
|
python
|
def group_pairs(blocks, layout_blocks_list):
    """Group layout block indexes by their ec_hdr image_seq number.

    Arguments:
    List:blocks -- List of block objects
    List:layout_blocks_list -- List of layout block indexes

    Returns:
    List -- Layout block index lists, one per image_seq found.
    """
    image_dict={}
    for block_id in layout_blocks_list:
        image_seq=blocks[block_id].ec_hdr.image_seq
        # First occurrence of an image_seq starts a new group.
        if image_seq not in image_dict:
            image_dict[image_seq]=[block_id]
        else:
            image_dict[image_seq].append(block_id)
    log(group_pairs, 'Layout blocks found at PEBs: %s' % list(image_dict.values()))
    return list(image_dict.values())
|
Sort a list of layout blocks into pairs
Arguments:
List:blocks -- List of block objects
List:layout_blocks -- List of layout block indexes
Returns:
List -- Layout block pair indexes grouped in a list
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/layout.py#L50-L71
|
[
"def log(obj, message):\n if settings.logging_on or settings.logging_on_verbose:\n print('{} {}'.format(obj.__name__, message))\n"
] |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
from ubireader.debug import error, log
from ubireader.ubi.block import sort
def get_newest(blocks, layout_blocks):
    """Filter out old layout blocks from list

    Of any two layout blocks sharing an image_seq and LEB number, the one
    with the lower sqnum is older and is removed (in place).

    Arguments:
    List:blocks -- List of block objects
    List:layout_blocks -- List of layout block indexes

    Returns:
    List -- Newest layout blocks in list
    """
    layout_temp = list(layout_blocks)
    for i in range(0, len(layout_temp)):
        for k in range(0, len(layout_blocks)):
            # Only blocks from the same image and the same LEB compete.
            if blocks[layout_temp[i]].ec_hdr.image_seq != blocks[layout_blocks[k]].ec_hdr.image_seq:
                continue
            if blocks[layout_temp[i]].leb_num != blocks[layout_blocks[k]].leb_num:
                continue
            # Higher sqnum is newer: drop the older entry from the live list.
            if blocks[layout_temp[i]].vid_hdr.sqnum > blocks[layout_blocks[k]].vid_hdr.sqnum:
                del layout_blocks[k]
                break
    return layout_blocks
def associate_blocks(blocks, layout_pairs, start_peb_num):
    """Group block indexes with appropriate layout pairs

    Each layout pair gains a third element: the list of block indexes whose
    erase-counter header carries the same image_seq as the pair.

    Arguments:
    List:blocks -- List of block objects
    List:layout_pairs -- List of grouped layout blocks
    Int:start_peb_num -- Number of the PEB to start from.

    Returns:
    List -- Layout block pairs grouped with associated block ranges.
    """
    # NOTE(review): start_peb_num is unused in this implementation.
    seq_blocks = []
    for layout_pair in layout_pairs:
        seq_blocks = sort.by_image_seq(blocks, blocks[layout_pair[0]].ec_hdr.image_seq)
        layout_pair.append(seq_blocks)
    return layout_pairs
|
jrspruitt/ubi_reader
|
ubireader/ubi/block/layout.py
|
associate_blocks
|
python
|
def associate_blocks(blocks, layout_pairs, start_peb_num):
    """Append to each layout pair the block indexes sharing its image_seq.

    start_peb_num is accepted for interface compatibility but not used.
    """
    for pair in layout_pairs:
        seq = blocks[pair[0]].ec_hdr.image_seq
        pair.append(sort.by_image_seq(blocks, seq))
    return layout_pairs
|
Group block indexes with appropriate layout pairs
Arguments:
List:blocks -- List of block objects
List:layout_pairs -- List of grouped layout blocks
Int:start_peb_num -- Number of the PEB to start from.
Returns:
List -- Layout block pairs grouped with associated block ranges.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/block/layout.py#L74-L91
|
[
"def by_image_seq(blocks, image_seq):\n \"\"\"Filter blocks to return only those associated with the provided image_seq number.\n\n Argument:\n List:blocks -- List of block objects to sort.\n Int:image_seq -- image_seq number found in ec_hdr.\n\n Returns:\n List -- List of block indexes matching image_seq number.\n \"\"\"\n return list(filter(lambda block: blocks[block].ec_hdr.image_seq == image_seq, blocks))\n"
] |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
from ubireader.debug import error, log
from ubireader.ubi.block import sort
def get_newest(blocks, layout_blocks):
    """Filter out old layout blocks from list

    Of any two layout blocks sharing an image_seq and LEB number, the one
    with the lower sqnum is older and is removed (in place).

    Arguments:
    List:blocks -- List of block objects
    List:layout_blocks -- List of layout block indexes

    Returns:
    List -- Newest layout blocks in list
    """
    layout_temp = list(layout_blocks)
    for i in range(0, len(layout_temp)):
        for k in range(0, len(layout_blocks)):
            # Only blocks from the same image and the same LEB compete.
            if blocks[layout_temp[i]].ec_hdr.image_seq != blocks[layout_blocks[k]].ec_hdr.image_seq:
                continue
            if blocks[layout_temp[i]].leb_num != blocks[layout_blocks[k]].leb_num:
                continue
            # Higher sqnum is newer: drop the older entry from the live list.
            if blocks[layout_temp[i]].vid_hdr.sqnum > blocks[layout_blocks[k]].vid_hdr.sqnum:
                del layout_blocks[k]
                break
    return layout_blocks
def group_pairs(blocks, layout_blocks_list):
    """Sort a list of layout blocks into pairs

    Layout blocks are grouped by the image_seq in their erase-counter
    headers, so each group belongs to a single UBI image.

    Arguments:
    List:blocks -- List of block objects
    List:layout_blocks -- List of layout block indexes

    Returns:
    List -- Layout block pair indexes grouped in a list
    """
    image_dict={}
    for block_id in layout_blocks_list:
        image_seq=blocks[block_id].ec_hdr.image_seq
        # First occurrence of an image_seq starts a new group.
        if image_seq not in image_dict:
            image_dict[image_seq]=[block_id]
        else:
            image_dict[image_seq].append(block_id)
    log(group_pairs, 'Layout blocks found at PEBs: %s' % list(image_dict.values()))
    return list(image_dict.values())
|
jrspruitt/ubi_reader
|
ubireader/ubi/volume.py
|
get_volumes
|
python
|
def get_volumes(blocks, layout_info):
    """Build a dict of volume description objects keyed by volume name.

    Arguments:
    List:blocks -- List of layout block objects
    List:layout_info -- Layout info (indexes of layout blocks and
                        associated data blocks.)

    Returns:
    Dict -- Volume description objects by volume name.
    """
    vol_blocks_lists = sort.by_vol_id(blocks, layout_info[2])
    volumes = {}
    for rec in blocks[layout_info[0]].vtbl_recs:
        # Volumes without any data blocks still get an (empty) block list.
        block_list = vol_blocks_lists.setdefault(rec.rec_index, [])
        name = rec.name.strip(b'\x00').decode('utf-8')
        volumes[name] = description(rec.rec_index, rec, block_list)
    return volumes
|
Get a list of UBI volume objects from list of blocks
Arguments:
List:blocks -- List of layout block objects
List:layout_info -- Layout info (indexes of layout blocks and
associated data blocks.)
Returns:
Dict -- Of Volume objects by volume name, including any
relevant blocks.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubi/volume.py#L98-L120
|
[
"def by_vol_id(blocks, slist=None):\n \"\"\"Sort blocks by volume id\n\n Arguments:\n Obj:blocks -- List of block objects.\n List:slist -- (optional) List of block indexes.\n\n Return:\n Dict -- blocks grouped in lists with dict key as volume id.\n \"\"\"\n\n vol_blocks = {}\n\n # sort block by volume\n # not reliable with multiple partitions (fifo)\n\n for i in blocks:\n if slist and i not in slist:\n continue\n elif not blocks[i].is_valid:\n continue\n\n if blocks[i].vid_hdr.vol_id not in vol_blocks:\n vol_blocks[blocks[i].vid_hdr.vol_id] = []\n\n vol_blocks[blocks[i].vid_hdr.vol_id].append(blocks[i].peb_num)\n\n return vol_blocks\n"
] |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubi
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
from ubireader.debug import log
from ubireader.ubi import display
from ubireader.ubi.block import sort, get_blocks_in_list
class description(object):
    """UBI Volume object

    Attributes:
    Int:vol_id -- Volume ID
    Str:name -- Name of volume.
    Obj:vol_rec -- Volume record object
    Int:block_count -- Number of block associated with volume.

    Methods:
    display(tab) -- Print Volume information
        Str:tab -- (optional) '\t' to preface lines with.
    get_blocks(blocks) -- Returns list of block objects tied to this volume

    Volume object is basically a list of block indexes and some metadata
    describing a volume found in a UBI image.
    """
    def __init__(self, vol_id, vol_rec, block_list):
        self._vol_id = vol_id
        self._vol_rec = vol_rec
        self._name = self._vol_rec.name
        self._block_list = block_list
        log(description, 'Create Volume: %s, ID: %s, Block Cnt: %s' % (self.name, self.vol_id, len(self.block_list)))
    def __repr__(self):
        return 'Volume: %s' % (self.name.decode('utf-8'))
    # Read-only accessors exposed as properties.
    def _get_name(self):
        return self._name
    name = property(_get_name)
    def _get_vol_id(self):
        return self._vol_id
    vol_id = property(_get_vol_id)
    def _get_block_count(self):
        return len(self._block_list)
    block_count = property(_get_block_count)
    def _get_vol_rec(self):
        return self._vol_rec
    vol_rec = property(_get_vol_rec)
    def _get_block_list(self):
        return self._block_list
    block_list = property(_get_block_list)
    def get_blocks(self, blocks):
        # Resolve this volume's block indexes to the actual block objects.
        return get_blocks_in_list(blocks, self._block_list)
    def display(self, tab=''):
        return display.volume(self, tab)
    def reader(self, ubi):
        """Yield this volume's data one LEB-sized chunk at a time, in LEB order."""
        last_leb = 0
        for block in sort.by_leb(self.get_blocks(ubi.blocks)):
            if block == 'x':
                # 'x' marks a missing LEB in the sorted list; yield a blank
                # 0xFF chunk -- presumably the erased-flash pattern; confirm.
                last_leb += 1
                yield b'\xff'*ubi.leb_size
            else:
                last_leb += 1
                yield ubi.file.read_block_data(ubi.blocks[block])
def get_volumes(blocks, layout_info):
    """Get a list of UBI volume objects from list of blocks

    Arguments:
    List:blocks -- List of layout block objects
    List:layout_info -- Layout info (indexes of layout blocks and
                        associated data blocks.)

    Returns:
    Dict -- Of Volume objects by volume name, including any
            relevant blocks.
    """
    volumes = {}
    # Group data block indexes by the volume id found in their VID headers.
    vol_blocks_lists = sort.by_vol_id(blocks, layout_info[2])
    for vol_rec in blocks[layout_info[0]].vtbl_recs:
        # Volume names are NUL-padded inside the volume table record.
        vol_name = vol_rec.name.strip(b'\x00').decode('utf-8')
        # A volume may own no data blocks at all; give it an empty list.
        if vol_rec.rec_index not in vol_blocks_lists:
            vol_blocks_lists[vol_rec.rec_index] = []
        volumes[vol_name] = description(vol_rec.rec_index, vol_rec, vol_blocks_lists[vol_rec.rec_index])
    return volumes
|
jrspruitt/ubi_reader
|
ubireader/ubifs/misc.py
|
parse_key
|
python
|
def parse_key(key):
    """Decode a packed node key into its type, inode number, and hash."""
    high_word, low_word = struct.unpack('<II', key[0:UBIFS_SK_LEN])
    # The low word carries the key type in its top bits; its raw value
    # doubles as the key hash.
    #if key_type < UBIFS_KEY_TYPES_CNT:
    return {
        'type': low_word >> UBIFS_S_KEY_BLOCK_BITS,
        'ino_num': high_word & UBIFS_S_KEY_HASH_MASK,
        'khash': low_word,
    }
|
Parse node key
Arguments:
Str:key -- Hex string literal of node key.
Returns:
Int:key_type -- Type of key, data, ino, dent, etc.
Int:ino_num -- Inode number.
Int:khash -- Key hash.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubifs/misc.py#L31-L48
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubifs
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
import lzo
import struct
import zlib
from ubireader.ubifs.defines import *
from ubireader.debug import error
# For happy printing
ino_types = ['file', 'dir','lnk','blk','chr','fifo','sock']
node_types = ['ino','data','dent','xent','trun','pad','sb','mst','ref','idx','cs','orph']
key_types = ['ino','data','dent','xent']
def decompress(ctype, unc_len, data):
    """Decompress data.

    Arguments:
    Int:ctype -- Compression type: UBIFS_COMPR_LZO, UBIFS_COMPR_ZLIB, or
                 anything else for uncompressed passthrough.
    Int:unc_len -- Uncompressed data length.
    Str:data -- Data to be uncompressed.

    Returns:
    Uncompressed data; NOTE(review): returns None when decompression
    raises (only a warning is emitted) -- callers that concatenate the
    result will then fail.
    """
    if ctype == UBIFS_COMPR_LZO:
        try:
            # python-lzo expects an LZO1X header: magic 0xF0 plus the
            # big-endian uncompressed length.
            return lzo.decompress(b''.join((b'\xf0', struct.pack('>I', unc_len), data)))
        except Exception as e:
            error(decompress, 'Warn', 'LZO Error: %s' % e)
    elif ctype == UBIFS_COMPR_ZLIB:
        try:
            # Negative wbits: raw deflate stream with no zlib header/trailer.
            return zlib.decompress(data, -11)
        except Exception as e:
            error(decompress, 'Warn', 'ZLib Error: %s' % e)
    else:
        return data
|
jrspruitt/ubi_reader
|
ubireader/ubifs/misc.py
|
decompress
|
python
|
def decompress(ctype, unc_len, data):
    """Return *data* decompressed per *ctype*; unknown types pass through.

    On a decompression failure a warning is logged and None is returned.
    """
    if ctype != UBIFS_COMPR_LZO and ctype != UBIFS_COMPR_ZLIB:
        return data
    try:
        if ctype == UBIFS_COMPR_LZO:
            # Prepend the LZO1X header (0xf0 + big-endian length).
            return lzo.decompress(b''.join((b'\xf0', struct.pack('>I', unc_len), data)))
        return zlib.decompress(data, -11)
    except Exception as e:
        if ctype == UBIFS_COMPR_LZO:
            error(decompress, 'Warn', 'LZO Error: %s' % e)
        else:
            error(decompress, 'Warn', 'ZLib Error: %s' % e)
|
Decompress data.
Arguments:
Int:ctype -- Compression type LZO, ZLIB (*currently unused*).
Int:unc_len -- Uncompressed data lenth.
Str:data -- Data to be uncompessed.
Returns:
Uncompressed Data.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubifs/misc.py#L51-L73
|
[
"def error(obj, level, message):\n if settings.error_action is 'exit':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n\n else:\n if level.lower() == 'warn':\n print('{} {}: {}'.format(obj.__name__, level, message))\n elif level.lower() == 'fatal':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n else:\n print('{} {}: {}'.format(obj.__name__, level, message))\n"
] |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubifs
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
import lzo
import struct
import zlib
from ubireader.ubifs.defines import *
from ubireader.debug import error
# For happy printing
ino_types = ['file', 'dir','lnk','blk','chr','fifo','sock']
node_types = ['ino','data','dent','xent','trun','pad','sb','mst','ref','idx','cs','orph']
key_types = ['ino','data','dent','xent']
def parse_key(key):
    """Parse node key

    Arguments:
    Str:key -- Bytes of the node key (only the first UBIFS_SK_LEN are used).

    Returns dict with:
    Int:type -- Type of key, data, ino, dent, etc.
    Int:ino_num -- Inode number.
    Int:khash -- Key hash (the raw low word; still contains the type bits).
    """
    hkey, lkey = struct.unpack('<II',key[0:UBIFS_SK_LEN])
    ino_num = hkey & UBIFS_S_KEY_HASH_MASK
    key_type = lkey >> UBIFS_S_KEY_BLOCK_BITS
    khash = lkey
    #if key_type < UBIFS_KEY_TYPES_CNT:
    return {'type':key_type, 'ino_num':ino_num, 'khash': khash}
|
jrspruitt/ubi_reader
|
ubireader/ubifs/walk.py
|
index
|
python
|
def index(ubifs, lnum, offset, inodes={}, bad_blocks=[]):
    """Walk the UBIFS B-tree index, collecting ino/dent/data nodes per inode.

    Arguments:
    Obj:ubifs       -- UBIFS object.
    Int:lnum        -- Logical erase block number to read.
    Int:offset      -- Offset within that LEB.
    Dict:inodes     -- Accumulator, filled in place: per inode number a dict
                       with 'ino' plus optional 'data'/'dent' lists.
    List:bad_blocks -- Accumulator of LEB numbers that failed to read.

    NOTE(review): results are delivered by mutating *inodes*; the function
    has no return value.  The mutable default arguments are shared across
    top-level calls -- callers should pass fresh containers.
    """
    try:
        # Skip LEBs already known to be unreadable.
        if len(bad_blocks):
            if lnum in bad_blocks:
                return
        # Read the common header to learn this node's type and length.
        ubifs.file.seek((ubifs.leb_size * lnum) + offset)
        buf = ubifs.file.read(UBIFS_COMMON_HDR_SZ)
        chdr = nodes.common_hdr(buf)
        log(index , '%s file addr: %s' % (chdr, ubifs.file.last_read_addr()))
        verbose_display(chdr)
        node_buf = ubifs.file.read(chdr.len - UBIFS_COMMON_HDR_SZ)
        file_offset = ubifs.file.last_read_addr()
    except Exception as e:
        if str(e) == 'Bad Read Offset Request' and settings.warn_only_block_read_errors:
            # Remember the bad LEB so sibling branches do not retry it.
            bad_blocks.append(lnum)
            return
        else:
            error(index, 'Fatal', 'LEB: %s, UBIFS offset: %s, error: %s' % (lnum, ((ubifs.leb_size * lnum) + offset), e))
    if chdr.node_type == UBIFS_IDX_NODE:
        # Interior index node: recurse into every branch.
        try:
            idxn = nodes.idx_node(node_buf)
        except Exception as e:
            if settings.warn_only_block_read_errors:
                error(index, 'Error', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))
                return
            else:
                error(index, 'Fatal', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))
        log(index, '%s file addr: %s' % (idxn, file_offset))
        verbose_display(idxn)
        branch_idx = 0
        for branch in idxn.branches:
            verbose_log(index, '-------------------')
            log(index, '%s file addr: %s' % (branch, file_offset + UBIFS_IDX_NODE_SZ + (branch_idx * UBIFS_BRANCH_SZ)))
            verbose_display(branch)
            index(ubifs, branch.lnum, branch.offs, inodes, bad_blocks)
            branch_idx += 1
    elif chdr.node_type == UBIFS_INO_NODE:
        # Leaf: inode node -- stored once per inode number.
        try:
            inon = nodes.ino_node(node_buf)
        except Exception as e:
            if settings.warn_only_block_read_errors:
                error(index, 'Error', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))
                return
            else:
                error(index, 'Fatal', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))
        ino_num = inon.key['ino_num']
        log(index, '%s file addr: %s, ino num: %s' % (inon, file_offset, ino_num))
        verbose_display(inon)
        if not ino_num in inodes:
            inodes[ino_num] = {}
        inodes[ino_num]['ino'] = inon
    elif chdr.node_type == UBIFS_DATA_NODE:
        # Leaf: data node -- appended to the owning inode's 'data' list.
        try:
            datn = nodes.data_node(node_buf, (ubifs.leb_size * lnum) + UBIFS_COMMON_HDR_SZ + offset + UBIFS_DATA_NODE_SZ)
        except Exception as e:
            if settings.warn_only_block_read_errors:
                error(index, 'Error', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))
                return
            else:
                error(index, 'Fatal', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))
        ino_num = datn.key['ino_num']
        log(index, '%s file addr: %s, ino num: %s' % (datn, file_offset, ino_num))
        verbose_display(datn)
        if not ino_num in inodes:
            inodes[ino_num] = {}
        if not 'data' in inodes[ino_num]:
            inodes[ino_num]['data']= []
        inodes[ino_num]['data'].append(datn)
    elif chdr.node_type == UBIFS_DENT_NODE:
        # Leaf: directory entry node -- appended to the owner's 'dent' list.
        try:
            dn = nodes.dent_node(node_buf)
        except Exception as e:
            if settings.warn_only_block_read_errors:
                error(index, 'Error', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))
                return
            else:
                error(index, 'Fatal', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))
        ino_num = dn.key['ino_num']
        log(index, '%s file addr: %s, ino num: %s' % (dn, file_offset, ino_num))
        verbose_display(dn)
        if not ino_num in inodes:
            inodes[ino_num] = {}
        if not 'dent' in inodes[ino_num]:
            inodes[ino_num]['dent']= []
        inodes[ino_num]['dent'].append(dn)
|
Walk the index gathering Inode, Dir Entry, and File nodes.
Arguments:
Obj:ubifs -- UBIFS object.
Int:lnum -- Logical erase block number.
Int:offset -- Offset in logical erase block.
Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.
Returns:
Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.
'ino' -- Inode node.
'data' -- List of data nodes if present.
'dent' -- List of directory entry nodes if present.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/ubifs/walk.py#L25-L151
|
[
"def index(ubifs, lnum, offset, inodes={}, bad_blocks=[]):\n \"\"\"Walk the index gathering Inode, Dir Entry, and File nodes.\n\n Arguments:\n Obj:ubifs -- UBIFS object.\n Int:lnum -- Logical erase block number.\n Int:offset -- Offset in logical erase block.\n Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.\n\n Returns:\n Dict:inodes -- Dict of ino/dent/file nodes keyed to inode number.\n 'ino' -- Inode node.\n 'data' -- List of data nodes if present.\n 'dent' -- List of directory entry nodes if present.\n \"\"\"\n try:\n if len(bad_blocks):\n if lnum in bad_blocks:\n return\n\n ubifs.file.seek((ubifs.leb_size * lnum) + offset)\n buf = ubifs.file.read(UBIFS_COMMON_HDR_SZ)\n chdr = nodes.common_hdr(buf)\n log(index , '%s file addr: %s' % (chdr, ubifs.file.last_read_addr()))\n verbose_display(chdr)\n node_buf = ubifs.file.read(chdr.len - UBIFS_COMMON_HDR_SZ)\n file_offset = ubifs.file.last_read_addr()\n\n except Exception as e:\n if str(e) == 'Bad Read Offset Request' and settings.warn_only_block_read_errors:\n bad_blocks.append(lnum)\n return\n\n else:\n error(index, 'Fatal', 'LEB: %s, UBIFS offset: %s, error: %s' % (lnum, ((ubifs.leb_size * lnum) + offset), e))\n\n if chdr.node_type == UBIFS_IDX_NODE:\n try:\n idxn = nodes.idx_node(node_buf)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting idx_node: %s' % (file_offset, e))\n\n log(index, '%s file addr: %s' % (idxn, file_offset))\n verbose_display(idxn)\n branch_idx = 0\n\n for branch in idxn.branches:\n verbose_log(index, '-------------------')\n log(index, '%s file addr: %s' % (branch, file_offset + UBIFS_IDX_NODE_SZ + (branch_idx * UBIFS_BRANCH_SZ)))\n verbose_display(branch)\n index(ubifs, branch.lnum, branch.offs, inodes, bad_blocks)\n branch_idx += 1\n\n elif chdr.node_type == UBIFS_INO_NODE:\n 
try:\n inon = nodes.ino_node(node_buf)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting ino_node: %s' % (file_offset, e))\n\n ino_num = inon.key['ino_num']\n log(index, '%s file addr: %s, ino num: %s' % (inon, file_offset, ino_num))\n verbose_display(inon)\n\n if not ino_num in inodes:\n inodes[ino_num] = {}\n\n inodes[ino_num]['ino'] = inon\n\n elif chdr.node_type == UBIFS_DATA_NODE:\n try:\n datn = nodes.data_node(node_buf, (ubifs.leb_size * lnum) + UBIFS_COMMON_HDR_SZ + offset + UBIFS_DATA_NODE_SZ)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting data_node: %s' % (file_offset, e))\n\n ino_num = datn.key['ino_num']\n log(index, '%s file addr: %s, ino num: %s' % (datn, file_offset, ino_num))\n verbose_display(datn)\n\n if not ino_num in inodes:\n inodes[ino_num] = {}\n\n if not 'data' in inodes[ino_num]:\n inodes[ino_num]['data']= []\n\n inodes[ino_num]['data'].append(datn)\n\n elif chdr.node_type == UBIFS_DENT_NODE:\n try:\n dn = nodes.dent_node(node_buf)\n\n except Exception as e:\n if settings.warn_only_block_read_errors:\n error(index, 'Error', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))\n return\n\n else:\n error(index, 'Fatal', 'Problem at file address: %s extracting dent_node: %s' % (file_offset, e))\n\n ino_num = dn.key['ino_num']\n log(index, '%s file addr: %s, ino num: %s' % (dn, file_offset, ino_num))\n verbose_display(dn)\n\n if not ino_num in inodes:\n inodes[ino_num] = {}\n\n if not 'dent' in inodes[ino_num]:\n inodes[ino_num]['dent']= []\n\n inodes[ino_num]['dent'].append(dn)\n",
"def error(obj, level, message):\n if settings.error_action is 'exit':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n\n else:\n if level.lower() == 'warn':\n print('{} {}: {}'.format(obj.__name__, level, message))\n elif level.lower() == 'fatal':\n print('{} {}: {}'.format(obj.__name__, level, message))\n if settings.fatal_traceback:\n traceback.print_exc()\n sys.exit(1)\n else:\n print('{} {}: {}'.format(obj.__name__, level, message))\n",
"def log(obj, message):\n if settings.logging_on or settings.logging_on_verbose:\n print('{} {}'.format(obj.__name__, message))\n",
"def verbose_display(displayable_obj):\n if settings.logging_on_verbose:\n print(displayable_obj.display('\\t'))\n",
"def verbose_log(obj, message):\n if settings.logging_on_verbose:\n log(obj, message)\n"
] |
#!/usr/bin/env python
#############################################################
# ubi_reader/ubifs
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
from ubireader import settings
from ubireader.ubifs import nodes
from ubireader.ubifs.defines import *
from ubireader.debug import error, log, verbose_log, verbose_display
|
jrspruitt/ubi_reader
|
ubireader/utils.py
|
guess_leb_size
|
python
|
def guess_leb_size(path):
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(0)
block_size = None
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
for m in re.finditer(UBIFS_NODE_MAGIC, buf):
start = m.start()
chdr = nodes.common_hdr(buf[start:start+UBIFS_COMMON_HDR_SZ])
if chdr and chdr.node_type == UBIFS_SB_NODE:
sb_start = start + UBIFS_COMMON_HDR_SZ
sb_end = sb_start + UBIFS_SB_NODE_SZ
if chdr.len != len(buf[sb_start:sb_end]):
f.seek(sb_start)
buf = f.read(UBIFS_SB_NODE_SZ)
else:
buf = buf[sb_start:sb_end]
sbn = nodes.sb_node(buf)
block_size = sbn.leb_size
f.close()
return block_size
f.close()
return block_size
|
Get LEB size from superblock
Arguments:
Str:path -- Path to file.
Returns:
Int -- LEB size.
Searches file for superblock and retrieves leb size.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/utils.py#L86-L127
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
import re
from ubireader.debug import error, log
from ubireader.ubi.defines import UBI_EC_HDR_MAGIC, FILE_CHUNK_SZ
from ubireader.ubifs.defines import UBIFS_NODE_MAGIC, UBIFS_SB_NODE_SZ, UBIFS_SB_NODE, UBIFS_COMMON_HDR_SZ
from ubireader.ubifs import nodes
def guess_start_offset(path, guess_offset=0):
file_offset = guess_offset
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(guess_offset)
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
ubi_loc = buf.find(UBI_EC_HDR_MAGIC)
ubifs_loc = buf.find(UBIFS_NODE_MAGIC)
if ubi_loc == -1 and ubifs_loc == -1:
file_offset += FILE_CHUNK_SZ
continue
else:
if ubi_loc == -1:
ubi_loc = file_size + 1
elif ubifs_loc == -1:
ubifs_loc = file_size + 1
if ubi_loc < ubifs_loc:
log(guess_start_offset, 'Found UBI magic number at %s' % (file_offset + ubi_loc))
return file_offset + ubi_loc
elif ubifs_loc < ubi_loc:
log(guess_start_offset, 'Found UBIFS magic number at %s' % (file_offset + ubifs_loc))
return file_offset + ubifs_loc
else:
error(guess_start_offset, 'Fatal', 'Could not determine start offset.')
else:
error(guess_start_offset, 'Fatal', 'Could not determine start offset.')
f.close()
def guess_filetype(path, start_offset=0):
log(guess_filetype, 'Looking for file type at %s' % start_offset)
with open(path, 'rb') as f:
f.seek(start_offset)
buf = f.read(4)
if buf == UBI_EC_HDR_MAGIC:
ftype = UBI_EC_HDR_MAGIC
log(guess_filetype, 'File looks like a UBI image.')
elif buf == UBIFS_NODE_MAGIC:
ftype = UBIFS_NODE_MAGIC
log(guess_filetype, 'File looks like a UBIFS image.')
else:
ftype = None
error(guess_filetype, 'Fatal', 'Could not determine file type.')
return ftype
def guess_leb_size(path):
"""Get LEB size from superblock
Arguments:
Str:path -- Path to file.
Returns:
Int -- LEB size.
Searches file for superblock and retrieves leb size.
"""
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(0)
block_size = None
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
for m in re.finditer(UBIFS_NODE_MAGIC, buf):
start = m.start()
chdr = nodes.common_hdr(buf[start:start+UBIFS_COMMON_HDR_SZ])
if chdr and chdr.node_type == UBIFS_SB_NODE:
sb_start = start + UBIFS_COMMON_HDR_SZ
sb_end = sb_start + UBIFS_SB_NODE_SZ
if chdr.len != len(buf[sb_start:sb_end]):
f.seek(sb_start)
buf = f.read(UBIFS_SB_NODE_SZ)
else:
buf = buf[sb_start:sb_end]
sbn = nodes.sb_node(buf)
block_size = sbn.leb_size
f.close()
return block_size
f.close()
return block_size
def guess_peb_size(path):
"""Determine the most likely block size
Arguments:
Str:path -- Path to file.
Returns:
Int -- PEB size.
Searches file for Magic Number, picks most
common length between them.
"""
file_offset = 0
offsets = []
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(0)
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
for m in re.finditer(UBI_EC_HDR_MAGIC, buf):
start = m.start()
if not file_offset:
file_offset = start
idx = start
else:
idx = start+file_offset
offsets.append(idx)
file_offset += FILE_CHUNK_SZ
f.close()
occurances = {}
for i in range(0, len(offsets)):
try:
diff = offsets[i] - offsets[i-1]
except:
diff = offsets[i]
if diff not in occurances:
occurances[diff] = 0
occurances[diff] += 1
most_frequent = 0
block_size = None
for offset in occurances:
if occurances[offset] > most_frequent:
most_frequent = occurances[offset]
block_size = offset
return block_size
|
jrspruitt/ubi_reader
|
ubireader/utils.py
|
guess_peb_size
|
python
|
def guess_peb_size(path):
file_offset = 0
offsets = []
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(0)
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
for m in re.finditer(UBI_EC_HDR_MAGIC, buf):
start = m.start()
if not file_offset:
file_offset = start
idx = start
else:
idx = start+file_offset
offsets.append(idx)
file_offset += FILE_CHUNK_SZ
f.close()
occurances = {}
for i in range(0, len(offsets)):
try:
diff = offsets[i] - offsets[i-1]
except:
diff = offsets[i]
if diff not in occurances:
occurances[diff] = 0
occurances[diff] += 1
most_frequent = 0
block_size = None
for offset in occurances:
if occurances[offset] > most_frequent:
most_frequent = occurances[offset]
block_size = offset
return block_size
|
Determine the most likely block size
Arguments:
Str:path -- Path to file.
Returns:
Int -- PEB size.
Searches file for Magic Number, picks most
common length between them.
|
train
|
https://github.com/jrspruitt/ubi_reader/blob/7079dd380c1c9896bced30d6d34e8780b9181597/ubireader/utils.py#L131-L186
| null |
#!/usr/bin/env python
#############################################################
# ubi_reader
# (c) 2013 Jason Pruitt (jrspruitt@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#############################################################
import re
from ubireader.debug import error, log
from ubireader.ubi.defines import UBI_EC_HDR_MAGIC, FILE_CHUNK_SZ
from ubireader.ubifs.defines import UBIFS_NODE_MAGIC, UBIFS_SB_NODE_SZ, UBIFS_SB_NODE, UBIFS_COMMON_HDR_SZ
from ubireader.ubifs import nodes
def guess_start_offset(path, guess_offset=0):
file_offset = guess_offset
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(guess_offset)
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
ubi_loc = buf.find(UBI_EC_HDR_MAGIC)
ubifs_loc = buf.find(UBIFS_NODE_MAGIC)
if ubi_loc == -1 and ubifs_loc == -1:
file_offset += FILE_CHUNK_SZ
continue
else:
if ubi_loc == -1:
ubi_loc = file_size + 1
elif ubifs_loc == -1:
ubifs_loc = file_size + 1
if ubi_loc < ubifs_loc:
log(guess_start_offset, 'Found UBI magic number at %s' % (file_offset + ubi_loc))
return file_offset + ubi_loc
elif ubifs_loc < ubi_loc:
log(guess_start_offset, 'Found UBIFS magic number at %s' % (file_offset + ubifs_loc))
return file_offset + ubifs_loc
else:
error(guess_start_offset, 'Fatal', 'Could not determine start offset.')
else:
error(guess_start_offset, 'Fatal', 'Could not determine start offset.')
f.close()
def guess_filetype(path, start_offset=0):
log(guess_filetype, 'Looking for file type at %s' % start_offset)
with open(path, 'rb') as f:
f.seek(start_offset)
buf = f.read(4)
if buf == UBI_EC_HDR_MAGIC:
ftype = UBI_EC_HDR_MAGIC
log(guess_filetype, 'File looks like a UBI image.')
elif buf == UBIFS_NODE_MAGIC:
ftype = UBIFS_NODE_MAGIC
log(guess_filetype, 'File looks like a UBIFS image.')
else:
ftype = None
error(guess_filetype, 'Fatal', 'Could not determine file type.')
return ftype
def guess_leb_size(path):
"""Get LEB size from superblock
Arguments:
Str:path -- Path to file.
Returns:
Int -- LEB size.
Searches file for superblock and retrieves leb size.
"""
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(0)
block_size = None
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
for m in re.finditer(UBIFS_NODE_MAGIC, buf):
start = m.start()
chdr = nodes.common_hdr(buf[start:start+UBIFS_COMMON_HDR_SZ])
if chdr and chdr.node_type == UBIFS_SB_NODE:
sb_start = start + UBIFS_COMMON_HDR_SZ
sb_end = sb_start + UBIFS_SB_NODE_SZ
if chdr.len != len(buf[sb_start:sb_end]):
f.seek(sb_start)
buf = f.read(UBIFS_SB_NODE_SZ)
else:
buf = buf[sb_start:sb_end]
sbn = nodes.sb_node(buf)
block_size = sbn.leb_size
f.close()
return block_size
f.close()
return block_size
def guess_peb_size(path):
"""Determine the most likely block size
Arguments:
Str:path -- Path to file.
Returns:
Int -- PEB size.
Searches file for Magic Number, picks most
common length between them.
"""
file_offset = 0
offsets = []
f = open(path, 'rb')
f.seek(0,2)
file_size = f.tell()+1
f.seek(0)
for _ in range(0, file_size, FILE_CHUNK_SZ):
buf = f.read(FILE_CHUNK_SZ)
for m in re.finditer(UBI_EC_HDR_MAGIC, buf):
start = m.start()
if not file_offset:
file_offset = start
idx = start
else:
idx = start+file_offset
offsets.append(idx)
file_offset += FILE_CHUNK_SZ
f.close()
occurances = {}
for i in range(0, len(offsets)):
try:
diff = offsets[i] - offsets[i-1]
except:
diff = offsets[i]
if diff not in occurances:
occurances[diff] = 0
occurances[diff] += 1
most_frequent = 0
block_size = None
for offset in occurances:
if occurances[offset] > most_frequent:
most_frequent = occurances[offset]
block_size = offset
return block_size
|
edx/event-tracking
|
eventtracking/backends/mongodb.py
|
MongoBackend._create_indexes
|
python
|
def _create_indexes(self):
# WARNING: The collection will be locked during the index
# creation. If the collection has a large number of
# documents in it, the operation can take a long time.
# TODO: The creation of indexes can be moved to a Django
# management command or equivalent. There is also an option to
# run the indexing on the background, without locking.
self.collection.ensure_index([('time', pymongo.DESCENDING)])
self.collection.ensure_index('name')
|
Ensures the proper fields are indexed
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/mongodb.py#L75-L85
| null |
class MongoBackend(object):
"""Class for a MongoDB event tracker Backend"""
def __init__(self, **kwargs):
"""
Connect to a MongoDB.
:Parameters:
- `host`: hostname
- `port`: port
- `user`: collection username
- `password`: collection user password
- `database`: name of the database
- `collection`: name of the collection
- `extra`: parameters to pymongo.MongoClient not listed above
"""
super(MongoBackend, self).__init__()
# Extract connection parameters from kwargs
host = kwargs.get('host', 'localhost')
port = kwargs.get('port', 27017)
user = kwargs.get('user', '')
password = kwargs.get('password', '')
db_name = kwargs.get('database', 'eventtracking')
collection_name = kwargs.get('collection', 'events')
# Other mongo connection arguments
extra = kwargs.get('extra', {})
# By default disable write acknowledgments, reducing the time
# blocking during an insert
extra['w'] = extra.get('w', 0)
# Make timezone aware by default
extra['tz_aware'] = extra.get('tz_aware', True)
# Connect to database and get collection
self.connection = MongoClient(
host=host,
port=port,
**extra
)
self.database = self.connection[db_name]
if user or password:
self.database.authenticate(user, password)
self.collection = self.database[collection_name]
self._create_indexes()
def send(self, event):
"""Insert the event in to the Mongo collection"""
try:
self.collection.insert(event, manipulate=False)
except (PyMongoError, BSONError):
# The event will be lost in case of a connection error or any error
# that occurs when trying to insert the event into Mongo.
# pymongo will re-connect/re-authenticate automatically
# during the next event.
msg = 'Error inserting to MongoDB event tracker backend'
log.exception(msg)
|
edx/event-tracking
|
eventtracking/backends/mongodb.py
|
MongoBackend.send
|
python
|
def send(self, event):
try:
self.collection.insert(event, manipulate=False)
except (PyMongoError, BSONError):
# The event will be lost in case of a connection error or any error
# that occurs when trying to insert the event into Mongo.
# pymongo will re-connect/re-authenticate automatically
# during the next event.
msg = 'Error inserting to MongoDB event tracker backend'
log.exception(msg)
|
Insert the event in to the Mongo collection
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/mongodb.py#L87-L97
| null |
class MongoBackend(object):
"""Class for a MongoDB event tracker Backend"""
def __init__(self, **kwargs):
"""
Connect to a MongoDB.
:Parameters:
- `host`: hostname
- `port`: port
- `user`: collection username
- `password`: collection user password
- `database`: name of the database
- `collection`: name of the collection
- `extra`: parameters to pymongo.MongoClient not listed above
"""
super(MongoBackend, self).__init__()
# Extract connection parameters from kwargs
host = kwargs.get('host', 'localhost')
port = kwargs.get('port', 27017)
user = kwargs.get('user', '')
password = kwargs.get('password', '')
db_name = kwargs.get('database', 'eventtracking')
collection_name = kwargs.get('collection', 'events')
# Other mongo connection arguments
extra = kwargs.get('extra', {})
# By default disable write acknowledgments, reducing the time
# blocking during an insert
extra['w'] = extra.get('w', 0)
# Make timezone aware by default
extra['tz_aware'] = extra.get('tz_aware', True)
# Connect to database and get collection
self.connection = MongoClient(
host=host,
port=port,
**extra
)
self.database = self.connection[db_name]
if user or password:
self.database.authenticate(user, password)
self.collection = self.database[collection_name]
self._create_indexes()
def _create_indexes(self):
"""Ensures the proper fields are indexed"""
# WARNING: The collection will be locked during the index
# creation. If the collection has a large number of
# documents in it, the operation can take a long time.
# TODO: The creation of indexes can be moved to a Django
# management command or equivalent. There is also an option to
# run the indexing on the background, without locking.
self.collection.ensure_index([('time', pymongo.DESCENDING)])
self.collection.ensure_index('name')
|
edx/event-tracking
|
eventtracking/django/__init__.py
|
DjangoTracker.create_backends_from_settings
|
python
|
def create_backends_from_settings(self):
config = getattr(settings, DJANGO_BACKEND_SETTING_NAME, {})
backends = self.instantiate_objects(config)
return backends
|
Expects the Django setting "EVENT_TRACKING_BACKENDS" to be defined and point
to a dictionary of backend engine configurations.
Example::
EVENT_TRACKING_BACKENDS = {
'default': {
'ENGINE': 'some.arbitrary.Backend',
'OPTIONS': {
'endpoint': 'http://something/event'
}
},
'another_engine': {
'ENGINE': 'some.arbitrary.OtherBackend',
'OPTIONS': {
'user': 'foo'
}
},
}
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/django/__init__.py#L31-L57
|
[
"def instantiate_objects(self, node):\n \"\"\"\n Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated\n\n Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the\n special \"ENGINE\" key which indicates that a class of that type should be instantiated and passed all key-value\n pairs found in the sibling \"OPTIONS\" dictionary as keyword arguments.\n\n For example::\n\n tree = {\n 'a': {\n 'b': {\n 'first_obj': {\n 'ENGINE': 'mypackage.mymodule.Clazz',\n 'OPTIONS': {\n 'size': 10,\n 'foo': 'bar'\n }\n }\n },\n 'c': [\n {\n 'ENGINE': 'mypackage.mymodule.Clazz2',\n 'OPTIONS': {\n 'more_objects': {\n 'd': {'ENGINE': 'mypackage.foo.Bar'}\n }\n }\n }\n ]\n }\n }\n root = self.instantiate_objects(tree)\n\n That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to\n constructors were saved as attributes of the same name):\n\n assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>\n assert root['a']['b']['first_obj'].size == 10\n assert root['a']['b']['first_obj'].foo == 'bar'\n assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>\n assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>\n \"\"\"\n result = node\n if isinstance(node, dict):\n if 'ENGINE' in node:\n result = self.instantiate_from_dict(node)\n else:\n result = {}\n for key, value in six.iteritems(node):\n result[key] = self.instantiate_objects(value)\n elif isinstance(node, list):\n result = []\n for child in node:\n result.append(self.instantiate_objects(child))\n\n return result\n"
] |
class DjangoTracker(Tracker):
"""
A `eventtracking.tracker.Tracker` that constructs its backends from
Django settings.
"""
def __init__(self):
backends = self.create_backends_from_settings()
processors = self.create_processors_from_settings()
super(DjangoTracker, self).__init__(backends, ThreadLocalContextLocator(), processors)
def instantiate_objects(self, node):
"""
Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated
Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the
special "ENGINE" key which indicates that a class of that type should be instantiated and passed all key-value
pairs found in the sibling "OPTIONS" dictionary as keyword arguments.
For example::
tree = {
'a': {
'b': {
'first_obj': {
'ENGINE': 'mypackage.mymodule.Clazz',
'OPTIONS': {
'size': 10,
'foo': 'bar'
}
}
},
'c': [
{
'ENGINE': 'mypackage.mymodule.Clazz2',
'OPTIONS': {
'more_objects': {
'd': {'ENGINE': 'mypackage.foo.Bar'}
}
}
}
]
}
}
root = self.instantiate_objects(tree)
That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to
constructors were saved as attributes of the same name):
assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>
assert root['a']['b']['first_obj'].size == 10
assert root['a']['b']['first_obj'].foo == 'bar'
assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>
assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>
"""
result = node
if isinstance(node, dict):
if 'ENGINE' in node:
result = self.instantiate_from_dict(node)
else:
result = {}
for key, value in six.iteritems(node):
result[key] = self.instantiate_objects(value)
elif isinstance(node, list):
result = []
for child in node:
result.append(self.instantiate_objects(child))
return result
def instantiate_from_dict(self, values):
"""
Constructs an object given a dictionary containing an "ENGINE" key
which contains the full module path to the class, and an "OPTIONS"
key which contains a dictionary that will be passed in to the
constructor as keyword args.
"""
name = values['ENGINE']
options = values.get('OPTIONS', {})
# Parse the name
parts = name.split('.')
module_name = '.'.join(parts[:-1])
class_name = parts[-1]
# Get the class
try:
module = import_module(module_name)
cls = getattr(module, class_name)
except (ValueError, AttributeError, TypeError, ImportError):
raise ValueError('Cannot find class %s' % name)
options = self.instantiate_objects(options)
return cls(**options)
def create_processors_from_settings(self):
"""
Expects the Django setting "EVENT_TRACKING_PROCESSORS" to be defined and
point to a list of backend engine configurations.
Example::
EVENT_TRACKING_PROCESSORS = [
{
'ENGINE': 'some.arbitrary.Processor'
},
{
'ENGINE': 'some.arbitrary.OtherProcessor',
'OPTIONS': {
'user': 'foo'
}
},
]
"""
config = getattr(settings, DJANGO_PROCESSOR_SETTING_NAME, [])
processors = self.instantiate_objects(config)
return processors
|
edx/event-tracking
|
eventtracking/django/__init__.py
|
DjangoTracker.instantiate_objects
|
python
|
def instantiate_objects(self, node):
result = node
if isinstance(node, dict):
if 'ENGINE' in node:
result = self.instantiate_from_dict(node)
else:
result = {}
for key, value in six.iteritems(node):
result[key] = self.instantiate_objects(value)
elif isinstance(node, list):
result = []
for child in node:
result.append(self.instantiate_objects(child))
return result
|
Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated
Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the
special "ENGINE" key which indicates that a class of that type should be instantiated and passed all key-value
pairs found in the sibling "OPTIONS" dictionary as keyword arguments.
For example::
tree = {
'a': {
'b': {
'first_obj': {
'ENGINE': 'mypackage.mymodule.Clazz',
'OPTIONS': {
'size': 10,
'foo': 'bar'
}
}
},
'c': [
{
'ENGINE': 'mypackage.mymodule.Clazz2',
'OPTIONS': {
'more_objects': {
'd': {'ENGINE': 'mypackage.foo.Bar'}
}
}
}
]
}
}
root = self.instantiate_objects(tree)
That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to
constructors were saved as attributes of the same name):
assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>
assert root['a']['b']['first_obj'].size == 10
assert root['a']['b']['first_obj'].foo == 'bar'
assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>
assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/django/__init__.py#L59-L116
|
[
"def instantiate_objects(self, node):\n \"\"\"\n Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated\n\n Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the\n special \"ENGINE\" key which indicates that a class of that type should be instantiated and passed all key-value\n pairs found in the sibling \"OPTIONS\" dictionary as keyword arguments.\n\n For example::\n\n tree = {\n 'a': {\n 'b': {\n 'first_obj': {\n 'ENGINE': 'mypackage.mymodule.Clazz',\n 'OPTIONS': {\n 'size': 10,\n 'foo': 'bar'\n }\n }\n },\n 'c': [\n {\n 'ENGINE': 'mypackage.mymodule.Clazz2',\n 'OPTIONS': {\n 'more_objects': {\n 'd': {'ENGINE': 'mypackage.foo.Bar'}\n }\n }\n }\n ]\n }\n }\n root = self.instantiate_objects(tree)\n\n That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to\n constructors were saved as attributes of the same name):\n\n assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>\n assert root['a']['b']['first_obj'].size == 10\n assert root['a']['b']['first_obj'].foo == 'bar'\n assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>\n assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>\n \"\"\"\n result = node\n if isinstance(node, dict):\n if 'ENGINE' in node:\n result = self.instantiate_from_dict(node)\n else:\n result = {}\n for key, value in six.iteritems(node):\n result[key] = self.instantiate_objects(value)\n elif isinstance(node, list):\n result = []\n for child in node:\n result.append(self.instantiate_objects(child))\n\n return result\n",
"def instantiate_from_dict(self, values):\n \"\"\"\n Constructs an object given a dictionary containing an \"ENGINE\" key\n which contains the full module path to the class, and an \"OPTIONS\"\n key which contains a dictionary that will be passed in to the\n constructor as keyword args.\n \"\"\"\n\n name = values['ENGINE']\n options = values.get('OPTIONS', {})\n\n # Parse the name\n parts = name.split('.')\n module_name = '.'.join(parts[:-1])\n class_name = parts[-1]\n\n # Get the class\n try:\n module = import_module(module_name)\n cls = getattr(module, class_name)\n except (ValueError, AttributeError, TypeError, ImportError):\n raise ValueError('Cannot find class %s' % name)\n\n options = self.instantiate_objects(options)\n\n return cls(**options)\n"
] |
class DjangoTracker(Tracker):
"""
A `eventtracking.tracker.Tracker` that constructs its backends from
Django settings.
"""
def __init__(self):
backends = self.create_backends_from_settings()
processors = self.create_processors_from_settings()
super(DjangoTracker, self).__init__(backends, ThreadLocalContextLocator(), processors)
def create_backends_from_settings(self):
"""
Expects the Django setting "EVENT_TRACKING_BACKENDS" to be defined and point
to a dictionary of backend engine configurations.
Example::
EVENT_TRACKING_BACKENDS = {
'default': {
'ENGINE': 'some.arbitrary.Backend',
'OPTIONS': {
'endpoint': 'http://something/event'
}
},
'another_engine': {
'ENGINE': 'some.arbitrary.OtherBackend',
'OPTIONS': {
'user': 'foo'
}
},
}
"""
config = getattr(settings, DJANGO_BACKEND_SETTING_NAME, {})
backends = self.instantiate_objects(config)
return backends
def instantiate_from_dict(self, values):
"""
Constructs an object given a dictionary containing an "ENGINE" key
which contains the full module path to the class, and an "OPTIONS"
key which contains a dictionary that will be passed in to the
constructor as keyword args.
"""
name = values['ENGINE']
options = values.get('OPTIONS', {})
# Parse the name
parts = name.split('.')
module_name = '.'.join(parts[:-1])
class_name = parts[-1]
# Get the class
try:
module = import_module(module_name)
cls = getattr(module, class_name)
except (ValueError, AttributeError, TypeError, ImportError):
raise ValueError('Cannot find class %s' % name)
options = self.instantiate_objects(options)
return cls(**options)
def create_processors_from_settings(self):
"""
Expects the Django setting "EVENT_TRACKING_PROCESSORS" to be defined and
point to a list of backend engine configurations.
Example::
EVENT_TRACKING_PROCESSORS = [
{
'ENGINE': 'some.arbitrary.Processor'
},
{
'ENGINE': 'some.arbitrary.OtherProcessor',
'OPTIONS': {
'user': 'foo'
}
},
]
"""
config = getattr(settings, DJANGO_PROCESSOR_SETTING_NAME, [])
processors = self.instantiate_objects(config)
return processors
|
edx/event-tracking
|
eventtracking/django/__init__.py
|
DjangoTracker.instantiate_from_dict
|
python
|
def instantiate_from_dict(self, values):
name = values['ENGINE']
options = values.get('OPTIONS', {})
# Parse the name
parts = name.split('.')
module_name = '.'.join(parts[:-1])
class_name = parts[-1]
# Get the class
try:
module = import_module(module_name)
cls = getattr(module, class_name)
except (ValueError, AttributeError, TypeError, ImportError):
raise ValueError('Cannot find class %s' % name)
options = self.instantiate_objects(options)
return cls(**options)
|
Constructs an object given a dictionary containing an "ENGINE" key
which contains the full module path to the class, and an "OPTIONS"
key which contains a dictionary that will be passed in to the
constructor as keyword args.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/django/__init__.py#L118-L143
|
[
"def instantiate_objects(self, node):\n \"\"\"\n Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated\n\n Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the\n special \"ENGINE\" key which indicates that a class of that type should be instantiated and passed all key-value\n pairs found in the sibling \"OPTIONS\" dictionary as keyword arguments.\n\n For example::\n\n tree = {\n 'a': {\n 'b': {\n 'first_obj': {\n 'ENGINE': 'mypackage.mymodule.Clazz',\n 'OPTIONS': {\n 'size': 10,\n 'foo': 'bar'\n }\n }\n },\n 'c': [\n {\n 'ENGINE': 'mypackage.mymodule.Clazz2',\n 'OPTIONS': {\n 'more_objects': {\n 'd': {'ENGINE': 'mypackage.foo.Bar'}\n }\n }\n }\n ]\n }\n }\n root = self.instantiate_objects(tree)\n\n That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to\n constructors were saved as attributes of the same name):\n\n assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>\n assert root['a']['b']['first_obj'].size == 10\n assert root['a']['b']['first_obj'].foo == 'bar'\n assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>\n assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>\n \"\"\"\n result = node\n if isinstance(node, dict):\n if 'ENGINE' in node:\n result = self.instantiate_from_dict(node)\n else:\n result = {}\n for key, value in six.iteritems(node):\n result[key] = self.instantiate_objects(value)\n elif isinstance(node, list):\n result = []\n for child in node:\n result.append(self.instantiate_objects(child))\n\n return result\n"
] |
class DjangoTracker(Tracker):
"""
A `eventtracking.tracker.Tracker` that constructs its backends from
Django settings.
"""
def __init__(self):
backends = self.create_backends_from_settings()
processors = self.create_processors_from_settings()
super(DjangoTracker, self).__init__(backends, ThreadLocalContextLocator(), processors)
def create_backends_from_settings(self):
"""
Expects the Django setting "EVENT_TRACKING_BACKENDS" to be defined and point
to a dictionary of backend engine configurations.
Example::
EVENT_TRACKING_BACKENDS = {
'default': {
'ENGINE': 'some.arbitrary.Backend',
'OPTIONS': {
'endpoint': 'http://something/event'
}
},
'another_engine': {
'ENGINE': 'some.arbitrary.OtherBackend',
'OPTIONS': {
'user': 'foo'
}
},
}
"""
config = getattr(settings, DJANGO_BACKEND_SETTING_NAME, {})
backends = self.instantiate_objects(config)
return backends
def instantiate_objects(self, node):
"""
Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated
Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the
special "ENGINE" key which indicates that a class of that type should be instantiated and passed all key-value
pairs found in the sibling "OPTIONS" dictionary as keyword arguments.
For example::
tree = {
'a': {
'b': {
'first_obj': {
'ENGINE': 'mypackage.mymodule.Clazz',
'OPTIONS': {
'size': 10,
'foo': 'bar'
}
}
},
'c': [
{
'ENGINE': 'mypackage.mymodule.Clazz2',
'OPTIONS': {
'more_objects': {
'd': {'ENGINE': 'mypackage.foo.Bar'}
}
}
}
]
}
}
root = self.instantiate_objects(tree)
That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to
constructors were saved as attributes of the same name):
assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>
assert root['a']['b']['first_obj'].size == 10
assert root['a']['b']['first_obj'].foo == 'bar'
assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>
assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>
"""
result = node
if isinstance(node, dict):
if 'ENGINE' in node:
result = self.instantiate_from_dict(node)
else:
result = {}
for key, value in six.iteritems(node):
result[key] = self.instantiate_objects(value)
elif isinstance(node, list):
result = []
for child in node:
result.append(self.instantiate_objects(child))
return result
def create_processors_from_settings(self):
"""
Expects the Django setting "EVENT_TRACKING_PROCESSORS" to be defined and
point to a list of backend engine configurations.
Example::
EVENT_TRACKING_PROCESSORS = [
{
'ENGINE': 'some.arbitrary.Processor'
},
{
'ENGINE': 'some.arbitrary.OtherProcessor',
'OPTIONS': {
'user': 'foo'
}
},
]
"""
config = getattr(settings, DJANGO_PROCESSOR_SETTING_NAME, [])
processors = self.instantiate_objects(config)
return processors
|
edx/event-tracking
|
eventtracking/django/__init__.py
|
DjangoTracker.create_processors_from_settings
|
python
|
def create_processors_from_settings(self):
config = getattr(settings, DJANGO_PROCESSOR_SETTING_NAME, [])
processors = self.instantiate_objects(config)
return processors
|
Expects the Django setting "EVENT_TRACKING_PROCESSORS" to be defined and
point to a list of backend engine configurations.
Example::
EVENT_TRACKING_PROCESSORS = [
{
'ENGINE': 'some.arbitrary.Processor'
},
{
'ENGINE': 'some.arbitrary.OtherProcessor',
'OPTIONS': {
'user': 'foo'
}
},
]
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/django/__init__.py#L145-L168
|
[
"def instantiate_objects(self, node):\n \"\"\"\n Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated\n\n Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the\n special \"ENGINE\" key which indicates that a class of that type should be instantiated and passed all key-value\n pairs found in the sibling \"OPTIONS\" dictionary as keyword arguments.\n\n For example::\n\n tree = {\n 'a': {\n 'b': {\n 'first_obj': {\n 'ENGINE': 'mypackage.mymodule.Clazz',\n 'OPTIONS': {\n 'size': 10,\n 'foo': 'bar'\n }\n }\n },\n 'c': [\n {\n 'ENGINE': 'mypackage.mymodule.Clazz2',\n 'OPTIONS': {\n 'more_objects': {\n 'd': {'ENGINE': 'mypackage.foo.Bar'}\n }\n }\n }\n ]\n }\n }\n root = self.instantiate_objects(tree)\n\n That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to\n constructors were saved as attributes of the same name):\n\n assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>\n assert root['a']['b']['first_obj'].size == 10\n assert root['a']['b']['first_obj'].foo == 'bar'\n assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>\n assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>\n \"\"\"\n result = node\n if isinstance(node, dict):\n if 'ENGINE' in node:\n result = self.instantiate_from_dict(node)\n else:\n result = {}\n for key, value in six.iteritems(node):\n result[key] = self.instantiate_objects(value)\n elif isinstance(node, list):\n result = []\n for child in node:\n result.append(self.instantiate_objects(child))\n\n return result\n"
] |
class DjangoTracker(Tracker):
"""
A `eventtracking.tracker.Tracker` that constructs its backends from
Django settings.
"""
def __init__(self):
backends = self.create_backends_from_settings()
processors = self.create_processors_from_settings()
super(DjangoTracker, self).__init__(backends, ThreadLocalContextLocator(), processors)
def create_backends_from_settings(self):
"""
Expects the Django setting "EVENT_TRACKING_BACKENDS" to be defined and point
to a dictionary of backend engine configurations.
Example::
EVENT_TRACKING_BACKENDS = {
'default': {
'ENGINE': 'some.arbitrary.Backend',
'OPTIONS': {
'endpoint': 'http://something/event'
}
},
'another_engine': {
'ENGINE': 'some.arbitrary.OtherBackend',
'OPTIONS': {
'user': 'foo'
}
},
}
"""
config = getattr(settings, DJANGO_BACKEND_SETTING_NAME, {})
backends = self.instantiate_objects(config)
return backends
def instantiate_objects(self, node):
"""
Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated
Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the
special "ENGINE" key which indicates that a class of that type should be instantiated and passed all key-value
pairs found in the sibling "OPTIONS" dictionary as keyword arguments.
For example::
tree = {
'a': {
'b': {
'first_obj': {
'ENGINE': 'mypackage.mymodule.Clazz',
'OPTIONS': {
'size': 10,
'foo': 'bar'
}
}
},
'c': [
{
'ENGINE': 'mypackage.mymodule.Clazz2',
'OPTIONS': {
'more_objects': {
'd': {'ENGINE': 'mypackage.foo.Bar'}
}
}
}
]
}
}
root = self.instantiate_objects(tree)
That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to
constructors were saved as attributes of the same name):
assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'>
assert root['a']['b']['first_obj'].size == 10
assert root['a']['b']['first_obj'].foo == 'bar'
assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'>
assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'>
"""
result = node
if isinstance(node, dict):
if 'ENGINE' in node:
result = self.instantiate_from_dict(node)
else:
result = {}
for key, value in six.iteritems(node):
result[key] = self.instantiate_objects(value)
elif isinstance(node, list):
result = []
for child in node:
result.append(self.instantiate_objects(child))
return result
def instantiate_from_dict(self, values):
"""
Constructs an object given a dictionary containing an "ENGINE" key
which contains the full module path to the class, and an "OPTIONS"
key which contains a dictionary that will be passed in to the
constructor as keyword args.
"""
name = values['ENGINE']
options = values.get('OPTIONS', {})
# Parse the name
parts = name.split('.')
module_name = '.'.join(parts[:-1])
class_name = parts[-1]
# Get the class
try:
module = import_module(module_name)
cls = getattr(module, class_name)
except (ValueError, AttributeError, TypeError, ImportError):
raise ValueError('Cannot find class %s' % name)
options = self.instantiate_objects(options)
return cls(**options)
|
edx/event-tracking
|
eventtracking/backends/routing.py
|
RoutingBackend.register_backend
|
python
|
def register_backend(self, name, backend):
if not hasattr(backend, 'send') or not callable(backend.send):
raise ValueError('Backend %s does not have a callable "send" method.' % backend.__class__.__name__)
else:
self.backends[name] = backend
|
Register a new backend that will be called for each processed event.
Note that backends are called in the order that they are registered.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/routing.py#L55-L64
| null |
class RoutingBackend(object):
"""
Route events to the appropriate backends.
A routing backend has two types of components:
1) Processors - These are run sequentially, processing the output of the previous processor. If you had three
processors [a, b, c], the output of the processing step would be `c(b(a(event)))`. Note that for performance
reasons, the processor is able to actually mutate the event dictionary in-place. Event dictionaries may be large
and highly nested, so creating multiple copies could be problematic. A processor can also choose to prevent the
event from being emitted by raising `EventEmissionExit`. Doing so will prevent any subsequent processors from
running and prevent the event from being sent to the backends. Any other exception raised by a processor will be
logged and swallowed, subsequent processors will execute and the event will be emitted.
2) Backends - Backends are intended to not mutate the event and each receive the same event data. They are not
chained like processors. Once an event has been processed by the processor chain, it is passed to each backend in
the order that they were registered. Backends typically persist the event in some way, either by sending it
to an external system or saving it to disk. They are called synchronously and in sequence, so a long running
backend will block other backends until it is done persisting the event. Note that you can register another
`RoutingBackend` as a backend of a `RoutingBackend`, allowing for arbitrary processing trees.
`backends` is a collection that supports iteration over its items using `iteritems()`. The keys are expected to be
sortable and the values are expected to expose a `send(event)` method that will be called for each event. Each
backend in this collection is registered in order sorted alphanumeric ascending by key.
`processors` is an iterable of callables.
Raises a `ValueError` if any of the provided backends do not have a callable "send" attribute or any of the
processors are not callable.
"""
def __init__(self, backends=None, processors=None):
self.backends = OrderedDict()
self.processors = []
if backends is not None:
for name in sorted(backends.keys()):
self.register_backend(name, backends[name])
if processors is not None:
for processor in processors:
self.register_processor(processor)
def register_processor(self, processor):
"""
Register a new processor.
Note that processors are called in the order that they are registered.
"""
if not callable(processor):
raise ValueError('Processor %s is not callable.' % processor.__class__.__name__)
else:
self.processors.append(processor)
def send(self, event):
"""
Process the event using all registered processors and send it to all registered backends.
Logs and swallows all `Exception`.
"""
try:
processed_event = self.process_event(event)
except EventEmissionExit:
return
else:
self.send_to_backends(processed_event)
def process_event(self, event):
"""
Executes all event processors on the event in order.
`event` is a nested dictionary that represents the event.
Logs and swallows all `Exception` except `EventEmissionExit` which is re-raised if it is raised by a processor.
Returns the modified event.
"""
if len(self.processors) == 0: # lint-amnesty, pylint: disable=len-as-condition
return event
processed_event = event
for processor in self.processors:
try:
modified_event = processor(processed_event)
if modified_event is not None:
processed_event = modified_event
except EventEmissionExit:
raise
except Exception: # pylint: disable=broad-except
LOG.exception(
'Failed to execute processor: %s', str(processor)
)
return processed_event
def send_to_backends(self, event):
"""
Sends the event to all registered backends.
Logs and swallows all `Exception`.
"""
for name, backend in six.iteritems(self.backends):
try:
backend.send(event)
except Exception: # pylint: disable=broad-except
LOG.exception(
'Unable to send event to backend: %s', name
)
|
edx/event-tracking
|
eventtracking/backends/routing.py
|
RoutingBackend.register_processor
|
python
|
def register_processor(self, processor):
if not callable(processor):
raise ValueError('Processor %s is not callable.' % processor.__class__.__name__)
else:
self.processors.append(processor)
|
Register a new processor.
Note that processors are called in the order that they are registered.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/routing.py#L66-L75
| null |
class RoutingBackend(object):
"""
Route events to the appropriate backends.
A routing backend has two types of components:
1) Processors - These are run sequentially, processing the output of the previous processor. If you had three
processors [a, b, c], the output of the processing step would be `c(b(a(event)))`. Note that for performance
reasons, the processor is able to actually mutate the event dictionary in-place. Event dictionaries may be large
and highly nested, so creating multiple copies could be problematic. A processor can also choose to prevent the
event from being emitted by raising `EventEmissionExit`. Doing so will prevent any subsequent processors from
running and prevent the event from being sent to the backends. Any other exception raised by a processor will be
logged and swallowed, subsequent processors will execute and the event will be emitted.
2) Backends - Backends are intended to not mutate the event and each receive the same event data. They are not
chained like processors. Once an event has been processed by the processor chain, it is passed to each backend in
the order that they were registered. Backends typically persist the event in some way, either by sending it
to an external system or saving it to disk. They are called synchronously and in sequence, so a long running
backend will block other backends until it is done persisting the event. Note that you can register another
`RoutingBackend` as a backend of a `RoutingBackend`, allowing for arbitrary processing trees.
`backends` is a collection that supports iteration over its items using `iteritems()`. The keys are expected to be
sortable and the values are expected to expose a `send(event)` method that will be called for each event. Each
backend in this collection is registered in order sorted alphanumeric ascending by key.
`processors` is an iterable of callables.
Raises a `ValueError` if any of the provided backends do not have a callable "send" attribute or any of the
processors are not callable.
"""
def __init__(self, backends=None, processors=None):
self.backends = OrderedDict()
self.processors = []
if backends is not None:
for name in sorted(backends.keys()):
self.register_backend(name, backends[name])
if processors is not None:
for processor in processors:
self.register_processor(processor)
def register_backend(self, name, backend):
"""
Register a new backend that will be called for each processed event.
Note that backends are called in the order that they are registered.
"""
if not hasattr(backend, 'send') or not callable(backend.send):
raise ValueError('Backend %s does not have a callable "send" method.' % backend.__class__.__name__)
else:
self.backends[name] = backend
def send(self, event):
"""
Process the event using all registered processors and send it to all registered backends.
Logs and swallows all `Exception`.
"""
try:
processed_event = self.process_event(event)
except EventEmissionExit:
return
else:
self.send_to_backends(processed_event)
def process_event(self, event):
"""
Executes all event processors on the event in order.
`event` is a nested dictionary that represents the event.
Logs and swallows all `Exception` except `EventEmissionExit` which is re-raised if it is raised by a processor.
Returns the modified event.
"""
if len(self.processors) == 0: # lint-amnesty, pylint: disable=len-as-condition
return event
processed_event = event
for processor in self.processors:
try:
modified_event = processor(processed_event)
if modified_event is not None:
processed_event = modified_event
except EventEmissionExit:
raise
except Exception: # pylint: disable=broad-except
LOG.exception(
'Failed to execute processor: %s', str(processor)
)
return processed_event
def send_to_backends(self, event):
"""
Sends the event to all registered backends.
Logs and swallows all `Exception`.
"""
for name, backend in six.iteritems(self.backends):
try:
backend.send(event)
except Exception: # pylint: disable=broad-except
LOG.exception(
'Unable to send event to backend: %s', name
)
|
edx/event-tracking
|
eventtracking/backends/routing.py
|
RoutingBackend.send
|
python
|
def send(self, event):
try:
processed_event = self.process_event(event)
except EventEmissionExit:
return
else:
self.send_to_backends(processed_event)
|
Process the event using all registered processors and send it to all registered backends.
Logs and swallows all `Exception`.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/routing.py#L77-L88
|
[
"def process_event(self, event):\n \"\"\"\n\n Executes all event processors on the event in order.\n\n `event` is a nested dictionary that represents the event.\n\n Logs and swallows all `Exception` except `EventEmissionExit` which is re-raised if it is raised by a processor.\n\n Returns the modified event.\n \"\"\"\n\n if len(self.processors) == 0: # lint-amnesty, pylint: disable=len-as-condition\n return event\n\n processed_event = event\n\n for processor in self.processors:\n try:\n modified_event = processor(processed_event)\n if modified_event is not None:\n processed_event = modified_event\n except EventEmissionExit:\n raise\n except Exception: # pylint: disable=broad-except\n LOG.exception(\n 'Failed to execute processor: %s', str(processor)\n )\n\n return processed_event\n",
"def send_to_backends(self, event):\n \"\"\"\n Sends the event to all registered backends.\n\n Logs and swallows all `Exception`.\n \"\"\"\n\n for name, backend in six.iteritems(self.backends):\n try:\n backend.send(event)\n except Exception: # pylint: disable=broad-except\n LOG.exception(\n 'Unable to send event to backend: %s', name\n )\n"
] |
class RoutingBackend(object):
"""
Route events to the appropriate backends.
A routing backend has two types of components:
1) Processors - These are run sequentially, processing the output of the previous processor. If you had three
processors [a, b, c], the output of the processing step would be `c(b(a(event)))`. Note that for performance
reasons, the processor is able to actually mutate the event dictionary in-place. Event dictionaries may be large
and highly nested, so creating multiple copies could be problematic. A processor can also choose to prevent the
event from being emitted by raising `EventEmissionExit`. Doing so will prevent any subsequent processors from
running and prevent the event from being sent to the backends. Any other exception raised by a processor will be
logged and swallowed, subsequent processors will execute and the event will be emitted.
2) Backends - Backends are intended to not mutate the event and each receive the same event data. They are not
chained like processors. Once an event has been processed by the processor chain, it is passed to each backend in
the order that they were registered. Backends typically persist the event in some way, either by sending it
to an external system or saving it to disk. They are called synchronously and in sequence, so a long running
backend will block other backends until it is done persisting the event. Note that you can register another
`RoutingBackend` as a backend of a `RoutingBackend`, allowing for arbitrary processing trees.
`backends` is a collection that supports iteration over its items using `iteritems()`. The keys are expected to be
sortable and the values are expected to expose a `send(event)` method that will be called for each event. Each
backend in this collection is registered in order sorted alphanumeric ascending by key.
`processors` is an iterable of callables.
Raises a `ValueError` if any of the provided backends do not have a callable "send" attribute or any of the
processors are not callable.
"""
def __init__(self, backends=None, processors=None):
self.backends = OrderedDict()
self.processors = []
if backends is not None:
for name in sorted(backends.keys()):
self.register_backend(name, backends[name])
if processors is not None:
for processor in processors:
self.register_processor(processor)
def register_backend(self, name, backend):
"""
Register a new backend that will be called for each processed event.
Note that backends are called in the order that they are registered.
"""
if not hasattr(backend, 'send') or not callable(backend.send):
raise ValueError('Backend %s does not have a callable "send" method.' % backend.__class__.__name__)
else:
self.backends[name] = backend
def register_processor(self, processor):
"""
Register a new processor.
Note that processors are called in the order that they are registered.
"""
if not callable(processor):
raise ValueError('Processor %s is not callable.' % processor.__class__.__name__)
else:
self.processors.append(processor)
def process_event(self, event):
"""
Executes all event processors on the event in order.
`event` is a nested dictionary that represents the event.
Logs and swallows all `Exception` except `EventEmissionExit` which is re-raised if it is raised by a processor.
Returns the modified event.
"""
if len(self.processors) == 0: # lint-amnesty, pylint: disable=len-as-condition
return event
processed_event = event
for processor in self.processors:
try:
modified_event = processor(processed_event)
if modified_event is not None:
processed_event = modified_event
except EventEmissionExit:
raise
except Exception: # pylint: disable=broad-except
LOG.exception(
'Failed to execute processor: %s', str(processor)
)
return processed_event
def send_to_backends(self, event):
"""
Sends the event to all registered backends.
Logs and swallows all `Exception`.
"""
for name, backend in six.iteritems(self.backends):
try:
backend.send(event)
except Exception: # pylint: disable=broad-except
LOG.exception(
'Unable to send event to backend: %s', name
)
|
edx/event-tracking
|
eventtracking/backends/routing.py
|
RoutingBackend.send_to_backends
|
python
|
def send_to_backends(self, event):
for name, backend in six.iteritems(self.backends):
try:
backend.send(event)
except Exception: # pylint: disable=broad-except
LOG.exception(
'Unable to send event to backend: %s', name
)
|
Sends the event to all registered backends.
Logs and swallows all `Exception`.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/routing.py#L121-L134
| null |
class RoutingBackend(object):
"""
Route events to the appropriate backends.
A routing backend has two types of components:
1) Processors - These are run sequentially, processing the output of the previous processor. If you had three
processors [a, b, c], the output of the processing step would be `c(b(a(event)))`. Note that for performance
reasons, the processor is able to actually mutate the event dictionary in-place. Event dictionaries may be large
and highly nested, so creating multiple copies could be problematic. A processor can also choose to prevent the
event from being emitted by raising `EventEmissionExit`. Doing so will prevent any subsequent processors from
running and prevent the event from being sent to the backends. Any other exception raised by a processor will be
logged and swallowed, subsequent processors will execute and the event will be emitted.
2) Backends - Backends are intended to not mutate the event and each receive the same event data. They are not
chained like processors. Once an event has been processed by the processor chain, it is passed to each backend in
the order that they were registered. Backends typically persist the event in some way, either by sending it
to an external system or saving it to disk. They are called synchronously and in sequence, so a long running
backend will block other backends until it is done persisting the event. Note that you can register another
`RoutingBackend` as a backend of a `RoutingBackend`, allowing for arbitrary processing trees.
`backends` is a collection that supports iteration over its items using `iteritems()`. The keys are expected to be
sortable and the values are expected to expose a `send(event)` method that will be called for each event. Each
backend in this collection is registered in order sorted alphanumeric ascending by key.
`processors` is an iterable of callables.
Raises a `ValueError` if any of the provided backends do not have a callable "send" attribute or any of the
processors are not callable.
"""
def __init__(self, backends=None, processors=None):
self.backends = OrderedDict()
self.processors = []
if backends is not None:
for name in sorted(backends.keys()):
self.register_backend(name, backends[name])
if processors is not None:
for processor in processors:
self.register_processor(processor)
def register_backend(self, name, backend):
"""
Register a new backend that will be called for each processed event.
Note that backends are called in the order that they are registered.
"""
if not hasattr(backend, 'send') or not callable(backend.send):
raise ValueError('Backend %s does not have a callable "send" method.' % backend.__class__.__name__)
else:
self.backends[name] = backend
def register_processor(self, processor):
"""
Register a new processor.
Note that processors are called in the order that they are registered.
"""
if not callable(processor):
raise ValueError('Processor %s is not callable.' % processor.__class__.__name__)
else:
self.processors.append(processor)
def send(self, event):
"""
Process the event using all registered processors and send it to all registered backends.
Logs and swallows all `Exception`.
"""
try:
processed_event = self.process_event(event)
except EventEmissionExit:
return
else:
self.send_to_backends(processed_event)
def process_event(self, event):
"""
Executes all event processors on the event in order.
`event` is a nested dictionary that represents the event.
Logs and swallows all `Exception` except `EventEmissionExit` which is re-raised if it is raised by a processor.
Returns the modified event.
"""
if len(self.processors) == 0: # lint-amnesty, pylint: disable=len-as-condition
return event
processed_event = event
for processor in self.processors:
try:
modified_event = processor(processed_event)
if modified_event is not None:
processed_event = modified_event
except EventEmissionExit:
raise
except Exception: # pylint: disable=broad-except
LOG.exception(
'Failed to execute processor: %s', str(processor)
)
return processed_event
|
edx/event-tracking
|
eventtracking/tracker.py
|
Tracker.emit
|
python
|
def emit(self, name=None, data=None):
event = {
'name': name or UNKNOWN_EVENT_TYPE,
'timestamp': datetime.now(UTC),
'data': data or {},
'context': self.resolve_context()
}
self.routing_backend.send(event)
|
Emit an event annotated with the UTC time when this function was called.
`name` is a unique identification string for an event that has
already been registered.
`data` is a dictionary mapping field names to the value to include in the event.
Note that all values provided must be serializable.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/tracker.py#L65-L82
|
[
"def resolve_context(self):\n \"\"\"\n Create a new dictionary that corresponds to the union of all of the\n contexts that have been entered but not exited at this point.\n \"\"\"\n merged = dict()\n for context in self.located_context.values():\n merged.update(context)\n return merged\n"
] |
class Tracker(object):
"""
Track application events. Holds references to a set of backends that will
be used to persist any events that are emitted.
"""
def __init__(self, backends=None, context_locator=None, processors=None):
self.routing_backend = RoutingBackend(backends=backends, processors=processors)
self.context_locator = context_locator or DefaultContextLocator()
@property
def located_context(self):
"""
The thread local context for this tracker.
"""
return self.context_locator.get()
def get_backend(self, name):
"""Gets the backend that was configured with `name`"""
return self.backends[name]
@property
def processors(self):
"""The list of registered processors"""
return self.routing_backend.processors
@property
def backends(self):
"""The dictionary of registered backends"""
return self.routing_backend.backends
def resolve_context(self):
"""
Create a new dictionary that corresponds to the union of all of the
contexts that have been entered but not exited at this point.
"""
merged = dict()
for context in self.located_context.values():
merged.update(context)
return merged
def enter_context(self, name, ctx):
"""
Enter a named context. Any events emitted after calling this
method will contain all of the key-value pairs included in `ctx`
unless overridden by a context that is entered after this call.
"""
self.located_context[name] = ctx
def exit_context(self, name):
"""
Exit a named context. This will remove all key-value pairs
associated with this context from any events emitted after it
is removed.
"""
del self.located_context[name]
@contextmanager
def context(self, name, ctx):
"""
Execute the block with the given context applied. This manager
ensures that the context is removed even if an exception is raised
within the context.
"""
self.enter_context(name, ctx)
try:
yield
finally:
self.exit_context(name)
|
edx/event-tracking
|
eventtracking/tracker.py
|
Tracker.resolve_context
|
python
|
def resolve_context(self):
merged = dict()
for context in self.located_context.values():
merged.update(context)
return merged
|
Create a new dictionary that corresponds to the union of all of the
contexts that have been entered but not exited at this point.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/tracker.py#L84-L92
| null |
class Tracker(object):
"""
Track application events. Holds references to a set of backends that will
be used to persist any events that are emitted.
"""
def __init__(self, backends=None, context_locator=None, processors=None):
self.routing_backend = RoutingBackend(backends=backends, processors=processors)
self.context_locator = context_locator or DefaultContextLocator()
@property
def located_context(self):
"""
The thread local context for this tracker.
"""
return self.context_locator.get()
def get_backend(self, name):
"""Gets the backend that was configured with `name`"""
return self.backends[name]
@property
def processors(self):
"""The list of registered processors"""
return self.routing_backend.processors
@property
def backends(self):
"""The dictionary of registered backends"""
return self.routing_backend.backends
def emit(self, name=None, data=None):
"""
Emit an event annotated with the UTC time when this function was called.
`name` is a unique identification string for an event that has
already been registered.
`data` is a dictionary mapping field names to the value to include in the event.
Note that all values provided must be serializable.
"""
event = {
'name': name or UNKNOWN_EVENT_TYPE,
'timestamp': datetime.now(UTC),
'data': data or {},
'context': self.resolve_context()
}
self.routing_backend.send(event)
def enter_context(self, name, ctx):
"""
Enter a named context. Any events emitted after calling this
method will contain all of the key-value pairs included in `ctx`
unless overridden by a context that is entered after this call.
"""
self.located_context[name] = ctx
def exit_context(self, name):
"""
Exit a named context. This will remove all key-value pairs
associated with this context from any events emitted after it
is removed.
"""
del self.located_context[name]
@contextmanager
def context(self, name, ctx):
"""
Execute the block with the given context applied. This manager
ensures that the context is removed even if an exception is raised
within the context.
"""
self.enter_context(name, ctx)
try:
yield
finally:
self.exit_context(name)
|
edx/event-tracking
|
eventtracking/tracker.py
|
Tracker.context
|
python
|
def context(self, name, ctx):
self.enter_context(name, ctx)
try:
yield
finally:
self.exit_context(name)
|
Execute the block with the given context applied. This manager
ensures that the context is removed even if an exception is raised
within the context.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/tracker.py#L111-L121
|
[
"def enter_context(self, name, ctx):\n \"\"\"\n Enter a named context. Any events emitted after calling this\n method will contain all of the key-value pairs included in `ctx`\n unless overridden by a context that is entered after this call.\n \"\"\"\n self.located_context[name] = ctx\n",
"def exit_context(self, name):\n \"\"\"\n Exit a named context. This will remove all key-value pairs\n associated with this context from any events emitted after it\n is removed.\n \"\"\"\n del self.located_context[name]\n"
] |
class Tracker(object):
"""
Track application events. Holds references to a set of backends that will
be used to persist any events that are emitted.
"""
def __init__(self, backends=None, context_locator=None, processors=None):
self.routing_backend = RoutingBackend(backends=backends, processors=processors)
self.context_locator = context_locator or DefaultContextLocator()
@property
def located_context(self):
"""
The thread local context for this tracker.
"""
return self.context_locator.get()
def get_backend(self, name):
"""Gets the backend that was configured with `name`"""
return self.backends[name]
@property
def processors(self):
"""The list of registered processors"""
return self.routing_backend.processors
@property
def backends(self):
"""The dictionary of registered backends"""
return self.routing_backend.backends
def emit(self, name=None, data=None):
"""
Emit an event annotated with the UTC time when this function was called.
`name` is a unique identification string for an event that has
already been registered.
`data` is a dictionary mapping field names to the value to include in the event.
Note that all values provided must be serializable.
"""
event = {
'name': name or UNKNOWN_EVENT_TYPE,
'timestamp': datetime.now(UTC),
'data': data or {},
'context': self.resolve_context()
}
self.routing_backend.send(event)
def resolve_context(self):
"""
Create a new dictionary that corresponds to the union of all of the
contexts that have been entered but not exited at this point.
"""
merged = dict()
for context in self.located_context.values():
merged.update(context)
return merged
def enter_context(self, name, ctx):
"""
Enter a named context. Any events emitted after calling this
method will contain all of the key-value pairs included in `ctx`
unless overridden by a context that is entered after this call.
"""
self.located_context[name] = ctx
def exit_context(self, name):
"""
Exit a named context. This will remove all key-value pairs
associated with this context from any events emitted after it
is removed.
"""
del self.located_context[name]
@contextmanager
|
edx/event-tracking
|
eventtracking/backends/segment.py
|
SegmentBackend.send
|
python
|
def send(self, event):
if analytics is None:
return
context = event.get('context', {})
user_id = context.get('user_id')
name = event.get('name')
if name is None or user_id is None:
return
segment_context = {}
ga_client_id = context.get('client_id')
if ga_client_id is not None:
segment_context['Google Analytics'] = {
'clientId': ga_client_id
}
ip_address = context.get('ip')
if ip_address is not None:
segment_context['ip'] = ip_address
user_agent = context.get('agent')
if user_agent is not None:
segment_context['userAgent'] = user_agent
path = context.get('path')
referer = context.get('referer')
page = context.get('page')
if path and not page:
# Try to put together a url from host and path, hardcoding the schema.
# (Segment doesn't care about the schema for GA, but will extract the host and path from the url.)
host = context.get('host')
if host:
parts = ("https", host, path, "", "")
page = urlunsplit(parts)
if path is not None or referer is not None or page is not None:
segment_context['page'] = {}
if path is not None:
segment_context['page']['path'] = path
if referer is not None:
segment_context['page']['referrer'] = referer
if page is not None:
segment_context['page']['url'] = page
analytics.track(
user_id,
name,
event,
context=segment_context
)
|
Use the segment.com python API to send the event to segment.com
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/segment.py#L50-L100
| null |
class SegmentBackend(object):
"""
Send events to segment.com
It is assumed that other code elsewhere initializes the segment.com API and makes calls to analytics.identify.
Requires all emitted events to have the following structure (at a minimum)::
{
'name': 'something',
'context': {
'user_id': 10,
}
}
Additionally, the following fields can optionally be defined::
{
'context': {
'agent': "your user-agent string",
'client_id': "your google analytics client id",
'host': "your hostname",
'ip': "your IP address",
'page': "your page",
'path': "your path",
'referer': "your referrer",
}
}
The 'page', 'path' and 'referer' are sent to Segment as "page" information. If the 'page' is absent but the 'host'
and 'path' are present, these are used to create a URL value to substitute for the 'page' value.
Note that although some parts of the event are lifted out to pass explicitly into the Segment.com API, the entire
event is sent as the payload to segment.com, which includes all context, data and other fields in the event.
"""
|
edx/event-tracking
|
eventtracking/locator.py
|
ThreadLocalContextLocator.get
|
python
|
def get(self):
if not self.thread_local_data:
self.thread_local_data = threading.local()
if not hasattr(self.thread_local_data, 'context'):
self.thread_local_data.context = OrderedDict()
return self.thread_local_data.context
|
Return a reference to a thread-specific context
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/locator.py#L47-L55
| null |
class ThreadLocalContextLocator(object):
"""
Returns a different context depending on the thread that the locator
was called from. Thus, contexts can be isolated from one another
on thread boundaries.
Note that this makes use of `threading.local(),` which is typically
monkey-patched by alternative python concurrency frameworks (like `gevent`).
Calls to `threading.local()` are delayed until first usage in order to
give the third-party concurrency libraries an opportunity to monkey monkey
patch it.
"""
def __init__(self):
self.thread_local_data = None
|
edx/event-tracking
|
eventtracking/backends/logger.py
|
LoggerBackend.send
|
python
|
def send(self, event):
event_str = json.dumps(event, cls=DateTimeJSONEncoder)
# TODO: do something smarter than simply dropping the event on
# the floor.
if self.max_event_size is None or len(event_str) <= self.max_event_size:
self.log(event_str)
|
Send the event to the standard python logger
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/logger.py#L35-L42
| null |
class LoggerBackend(object):
"""
Event tracker backend that uses a python logger.
Events are logged to the INFO level as JSON strings.
"""
def __init__(self, **kwargs):
"""
Event tracker backend that uses a python logger.
`name` is an identifier for the logger, which should have
been configured using the default python mechanisms.
"""
name = kwargs.get('name', None)
self.max_event_size = kwargs.get('max_event_size', MAX_EVENT_SIZE)
self.event_logger = logging.getLogger(name)
level = kwargs.get('level', 'info')
self.log = getattr(self.event_logger, level.lower())
|
edx/event-tracking
|
eventtracking/backends/logger.py
|
DateTimeJSONEncoder.default
|
python
|
def default(self, obj): # lint-amnesty, pylint: disable=arguments-differ, method-hidden
if isinstance(obj, datetime):
if obj.tzinfo is None:
# Localize to UTC naive datetime objects
obj = UTC.localize(obj) # pylint: disable=no-value-for-parameter
else:
# Convert to UTC datetime objects from other timezones
obj = obj.astimezone(UTC)
return obj.isoformat()
elif isinstance(obj, date):
return obj.isoformat()
return super(DateTimeJSONEncoder, self).default(obj)
|
Serialize datetime and date objects of iso format.
datatime objects are converted to UTC.
|
train
|
https://github.com/edx/event-tracking/blob/8f993560545061d77f11615f5e3865b3916d5ea9/eventtracking/backends/logger.py#L48-L66
| null |
class DateTimeJSONEncoder(json.JSONEncoder):
"""JSON encoder aware of datetime.datetime and datetime.date objects"""
|
rochacbruno/python-pagseguro
|
examples/flask/flask_seguro/__init__.py
|
create_app
|
python
|
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(CONFIG[config_name])
BOOTSTRAP.init_app(app)
# call controllers
from flask_seguro.controllers.main import main as main_blueprint
app.register_blueprint(main_blueprint)
return app
|
Factory Function
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/examples/flask/flask_seguro/__init__.py#L9-L20
| null |
""" Application Skeleton """
from flask import Flask
from flask_bootstrap import Bootstrap
from config import CONFIG
BOOTSTRAP = Bootstrap()
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.build_checkout_params
|
python
|
def build_checkout_params(self, **kwargs):
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
|
build a dict with params
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L55-L164
|
[
"def is_valid_email(value):\n user_regex = re.compile(\n r\"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*$\"\n r'|^\"([\\001-\\010\\013\\014\\016-\\037!#-\\[\\]-\\177]|\\\\[\\001-\\011\\013'\n r\"\"\"\\014\\016-\\177])*\"$)\"\"\", re.IGNORECASE)\n domain_regex = re.compile(\n r'(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+(?:[A-Z]{2,6}|'\n r'[A-Z0-9-]{2,})$|^\\[(25[0-5]|2[0-4]\\d|[0-1]?\\d?\\d)(\\.(25[0-5]|'\n r'2[0-4]\\d|[0-1]?\\d?\\d)){3}\\]$', re.IGNORECASE)\n domain_whitelist = ['localhost']\n\n if not value or '@' not in value:\n raise PagSeguroValidationError(u'Email inválido')\n\n user_part, domain_part = value.rsplit('@', 1)\n\n if not user_regex.match(user_part):\n raise PagSeguroValidationError(u'Email inválido')\n\n if (domain_part not in domain_whitelist and\n not domain_regex.match(domain_part)):\n # Try for possible IDN domain-part\n try:\n domain_part = domain_part.encode('idna').decode('ascii')\n if not domain_regex.match(domain_part):\n raise PagSeguroValidationError(u'Email inválido')\n else:\n return value\n except UnicodeError:\n pass\n raise PagSeguroValidationError(u'Email inválido')\n return value\n",
"def is_valid_cpf(value):\n error_messages = {\n 'invalid': u\"CPF Inválido\",\n 'max_digits': (u\"CPF possui 11 dígitos (somente números) ou 14\"\n u\" (com pontos e hífen)\"),\n 'digits_only': (u\"Digite um CPF com apenas números ou com ponto e \"\n u\"hífen\"),\n }\n\n if value in EMPTY_VALUES:\n return u''\n orig_value = value[:]\n if not value.isdigit():\n value = re.sub(\"[-\\.]\", \"\", value)\n try:\n int(value)\n except ValueError:\n raise PagSeguroValidationError(error_messages['digits_only'])\n if len(value) != 11:\n raise PagSeguroValidationError(error_messages['max_digits'])\n orig_dv = value[-2:]\n\n new_1dv = sum([i * int(value[idx]) for idx, i in enumerate(range(10, 1, -\n 1))])\n new_1dv = DV_maker(new_1dv % 11)\n value = value[:-2] + str(new_1dv) + value[-1]\n new_2dv = sum([i * int(value[idx]) for idx, i in enumerate(range(11, 1, -\n 1))])\n new_2dv = DV_maker(new_2dv % 11)\n value = value[:-1] + str(new_2dv)\n if value[-2:] != orig_dv:\n raise PagSeguroValidationError(error_messages['invalid'])\n\n return orig_value\n",
"def is_valid_cnpj(value):\n\n error_messages = {\n 'invalid': u\"CNPJ Inválido\",\n 'max_digits': (u\"CNPJ possui 14 dígitos (somente números) ou 14\"\n u\" (com pontos e hífen)\"),\n 'digits_only': (\n u\"Digite um CNPJ com apenas números ou com ponto, barra \"\n u\"hífen\"),\n }\n\n if value in EMPTY_VALUES:\n return u''\n if not value.isdigit():\n value = re.sub(\"[-/\\.]\", \"\", value)\n orig_value = value[:]\n try:\n int(value)\n except ValueError:\n raise PagSeguroValidationError(error_messages['digits_only'])\n if len(value) != 14:\n raise PagSeguroValidationError(error_messages['max_digits'])\n\n orig_dv = value[-2:]\n\n new_1dv = sum([i * int(value[idx]) for idx, i in enumerate(list(range(\n 5, 1, -1)) + list(range(9, 1, -1)))])\n new_1dv = DV_maker(new_1dv % 11)\n value = value[:-2] + str(new_1dv) + value[-1]\n new_2dv = sum([i * int(value[idx]) for idx, i in enumerate(list(range(\n 6, 1, -1)) + list(range(9, 1, -1)))])\n new_2dv = DV_maker(new_2dv % 11)\n value = value[:-1] + str(new_2dv)\n if value[-2:] != orig_dv:\n raise PagSeguroValidationError(error_messages['invalid'])\n\n return orig_value\n",
"def clean_none_params(self):\n self.data = \\\n {k: v for k, v in self.data.items() if v or isinstance(v, bool)}\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.build_pre_approval_payment_params
|
python
|
def build_pre_approval_payment_params(self, **kwargs):
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
|
build a dict with params
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L166-L183
|
[
"def clean_none_params(self):\n self.data = \\\n {k: v for k, v in self.data.items() if v or isinstance(v, bool)}\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.get
|
python
|
def get(self, url):
return requests.get(url, params=self.data, headers=self.config.HEADERS)
|
do a get transaction
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L209-L211
| null |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.post
|
python
|
def post(self, url):
return requests.post(url, data=self.data, headers=self.config.HEADERS)
|
do a post request
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L213-L215
| null |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.checkout
|
python
|
def checkout(self, transparent=False, **kwargs):
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
|
create a pagseguro checkout
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L217-L225
|
[
"def build_checkout_params(self, **kwargs):\n \"\"\" build a dict with params \"\"\"\n params = kwargs or {}\n if self.sender:\n params['senderName'] = self.sender.get('name')\n params['senderAreaCode'] = self.sender.get('area_code')\n params['senderPhone'] = self.sender.get('phone')\n params['senderEmail'] = is_valid_email(self.sender.get('email'))\n params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))\n params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))\n params['senderBornDate'] = self.sender.get('born_date')\n params['senderHash'] = self.sender.get('hash')\n\n if self.config.USE_SHIPPING:\n if self.shipping:\n params['shippingType'] = self.shipping.get('type')\n params['shippingAddressStreet'] = self.shipping.get('street')\n params['shippingAddressNumber'] = self.shipping.get('number')\n params['shippingAddressComplement'] = self.shipping.get(\n 'complement')\n params['shippingAddressDistrict'] = self.shipping.get(\n 'district')\n params['shippingAddressPostalCode'] = self.shipping.get(\n 'postal_code')\n params['shippingAddressCity'] = self.shipping.get('city')\n params['shippingAddressState'] = self.shipping.get('state')\n params['shippingAddressCountry'] = self.shipping.get('country',\n 'BRA')\n if self.shipping.get('cost'):\n params['shippingCost'] = self.shipping.get('cost')\n else:\n params['shippingAddressRequired'] = 'false'\n\n if self.extra_amount:\n params['extraAmount'] = self.extra_amount\n\n params['reference'] = self.reference\n params['receiverEmail'] = self.data['email']\n\n if self.redirect_url:\n params['redirectURL'] = self.redirect_url\n\n if self.notification_url:\n params['notificationURL'] = self.notification_url\n\n if self.abandon_url:\n params['abandonURL'] = self.abandon_url\n\n for i, item in enumerate(self.items, 1):\n params['itemId%s' % i] = item.get('id')\n params['itemDescription%s' % i] = item.get('description')\n params['itemAmount%s' % i] = item.get('amount')\n params['itemQuantity%s' % i] = 
item.get('quantity')\n params['itemWeight%s' % i] = item.get('weight')\n params['itemShippingCost%s' % i] = item.get('shipping_cost')\n\n if self.payment:\n\n params['paymentMethod'] = self.payment.get('method')\n params['paymentMode'] = self.payment.get('mode')\n\n if self.credit_card:\n params['billingAddressCountry'] = 'BRA'\n\n credit_card_keys_map = [\n ('creditCardToken', 'credit_card_token'),\n ('installmentQuantity', 'installment_quantity'),\n ('installmentValue', 'installment_value'),\n ('noInterestInstallmentQuantity',\n 'no_interest_installment_quantity'),\n ('creditCardHolderName', 'card_holder_name'),\n ('creditCardHolderCPF', 'card_holder_cpf'),\n ('creditCardHolderBirthDate', 'card_holder_birth_date'),\n ('creditCardHolderAreaCode', 'card_holder_area_code'),\n ('creditCardHolderPhone', 'card_holder_phone'),\n ('billingAddressStreet', 'billing_address_street'),\n ('billingAddressNumber', 'billing_address_number'),\n ('billingAddressComplement', 'billing_address_complement'),\n ('billingAddressDistrict', 'billing_address_district'),\n ('billingAddressPostalCode', 'billing_address_postal_code'),\n ('billingAddressCity', 'billing_address_city'),\n ('billingAddressState', 'billing_address_state'),\n ]\n\n for key_to_set, key_to_get in credit_card_keys_map:\n params[key_to_set] = self.credit_card.get(key_to_get)\n\n if self.pre_approval:\n\n params['preApprovalCharge'] = self.pre_approval.get('charge')\n params['preApprovalName'] = self.pre_approval.get('name')\n params['preApprovalDetails'] = self.pre_approval.get('details')\n params['preApprovalAmountPerPayment'] = self.pre_approval.get(\n 'amount_per_payment')\n params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(\n 'max_amount_per_payment')\n params['preApprovalPeriod'] = self.pre_approval.get('period')\n params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(\n 'max_payments_per_period')\n params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(\n 
'max_amount_per_period')\n params['preApprovalInitialDate'] = self.pre_approval.get(\n 'initial_date')\n params['preApprovalFinalDate'] = self.pre_approval.get(\n 'final_date')\n params['preApprovalMaxTotalAmount'] = self.pre_approval.get(\n 'max_total_amount')\n\n self.data.update(params)\n self.clean_none_params()\n",
"def post(self, url):\n \"\"\" do a post request \"\"\"\n return requests.post(url, data=self.data, headers=self.config.HEADERS)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.check_notification
|
python
|
def check_notification(self, code):
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
|
check a notification by its code
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L232-L235
|
[
"def get(self, url):\n \"\"\" do a get transaction \"\"\"\n return requests.get(url, params=self.data, headers=self.config.HEADERS)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.check_pre_approval_notification
|
python
|
def check_pre_approval_notification(self, code):
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
|
check a notification by its code
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L237-L242
|
[
"def get(self, url):\n \"\"\" do a get transaction \"\"\"\n return requests.get(url, params=self.data, headers=self.config.HEADERS)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.pre_approval_ask_payment
|
python
|
def pre_approval_ask_payment(self, **kwargs):
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
|
ask form a subscribe payment
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L244-L248
|
[
"def build_pre_approval_payment_params(self, **kwargs):\n \"\"\" build a dict with params \"\"\"\n\n params = kwargs or {}\n\n params['reference'] = self.reference\n params['preApprovalCode'] = self.code\n\n for i, item in enumerate(self.items, 1):\n params['itemId%s' % i] = item.get('id')\n params['itemDescription%s' % i] = item.get('description')\n params['itemAmount%s' % i] = item.get('amount')\n params['itemQuantity%s' % i] = item.get('quantity')\n params['itemWeight%s' % i] = item.get('weight')\n params['itemShippingCost%s' % i] = item.get('shipping_cost')\n\n self.data.update(params)\n self.clean_none_params()\n",
"def post(self, url):\n \"\"\" do a post request \"\"\"\n return requests.post(url, data=self.data, headers=self.config.HEADERS)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.pre_approval_cancel
|
python
|
def pre_approval_cancel(self, code):
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
|
cancel a subscribe
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L250-L253
|
[
"def get(self, url):\n \"\"\" do a get transaction \"\"\"\n return requests.get(url, params=self.data, headers=self.config.HEADERS)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.check_transaction
|
python
|
def check_transaction(self, code):
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
|
check a transaction by its code
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L255-L258
|
[
"def get(self, url):\n \"\"\" do a get transaction \"\"\"\n return requests.get(url, params=self.data, headers=self.config.HEADERS)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.query_transactions
|
python
|
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
|
query transaction by date range
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L260-L277
|
[
"def _consume_query_transactions(self, initial_date, final_date,\n page=None,\n max_results=None):\n querystring = {\n 'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),\n 'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),\n 'page': page,\n 'maxPageResults': max_results,\n }\n self.data.update(querystring)\n self.clean_none_params()\n response = self.get(url=self.config.QUERY_TRANSACTION_URL)\n return PagSeguroTransactionSearchResult(response.content, self.config)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
""" query pre-approvals by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
pagseguro/__init__.py
|
PagSeguro.query_pre_approvals
|
python
|
def query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_pre_approvals(
initial_date, final_date, page, max_results)
results.extend(search_result.pre_approvals)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
|
query pre-approvals by date range
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/pagseguro/__init__.py#L293-L309
|
[
"def _consume_query_pre_approvals(self, initial_date, final_date, page=None,\n max_results=None):\n querystring = {\n 'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),\n 'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),\n 'page': page,\n 'maxPageResults': max_results,\n }\n\n self.data.update(querystring)\n self.clean_none_params()\n\n response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)\n return PagSeguroPreApprovalSearch(response.content, self.config)\n"
] |
class PagSeguro(object):
""" Pag Seguro V2 wrapper """
PAC = 1
SEDEX = 2
NONE = 3
def __init__(self, email, token, data=None, config=None):
config = config or {}
if not type(config) == dict:
raise Exception('Malformed config dict param')
self.config = Config(**config)
self.data = {}
self.data['email'] = email
self.data['token'] = token
if data and isinstance(data, dict):
self.data.update(data)
self.items = []
self.sender = {}
self.shipping = {}
self._reference = ""
self.extra_amount = None
self.redirect_url = None
self.notification_url = None
self.abandon_url = None
self.credit_card = {}
self.pre_approval = {}
self.checkout_session = None
self.payment = {}
def build_checkout_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
if self.sender:
params['senderName'] = self.sender.get('name')
params['senderAreaCode'] = self.sender.get('area_code')
params['senderPhone'] = self.sender.get('phone')
params['senderEmail'] = is_valid_email(self.sender.get('email'))
params['senderCPF'] = is_valid_cpf(self.sender.get('cpf'))
params['senderCNPJ'] = is_valid_cnpj(self.sender.get('cnpj'))
params['senderBornDate'] = self.sender.get('born_date')
params['senderHash'] = self.sender.get('hash')
if self.config.USE_SHIPPING:
if self.shipping:
params['shippingType'] = self.shipping.get('type')
params['shippingAddressStreet'] = self.shipping.get('street')
params['shippingAddressNumber'] = self.shipping.get('number')
params['shippingAddressComplement'] = self.shipping.get(
'complement')
params['shippingAddressDistrict'] = self.shipping.get(
'district')
params['shippingAddressPostalCode'] = self.shipping.get(
'postal_code')
params['shippingAddressCity'] = self.shipping.get('city')
params['shippingAddressState'] = self.shipping.get('state')
params['shippingAddressCountry'] = self.shipping.get('country',
'BRA')
if self.shipping.get('cost'):
params['shippingCost'] = self.shipping.get('cost')
else:
params['shippingAddressRequired'] = 'false'
if self.extra_amount:
params['extraAmount'] = self.extra_amount
params['reference'] = self.reference
params['receiverEmail'] = self.data['email']
if self.redirect_url:
params['redirectURL'] = self.redirect_url
if self.notification_url:
params['notificationURL'] = self.notification_url
if self.abandon_url:
params['abandonURL'] = self.abandon_url
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
if self.payment:
params['paymentMethod'] = self.payment.get('method')
params['paymentMode'] = self.payment.get('mode')
if self.credit_card:
params['billingAddressCountry'] = 'BRA'
credit_card_keys_map = [
('creditCardToken', 'credit_card_token'),
('installmentQuantity', 'installment_quantity'),
('installmentValue', 'installment_value'),
('noInterestInstallmentQuantity',
'no_interest_installment_quantity'),
('creditCardHolderName', 'card_holder_name'),
('creditCardHolderCPF', 'card_holder_cpf'),
('creditCardHolderBirthDate', 'card_holder_birth_date'),
('creditCardHolderAreaCode', 'card_holder_area_code'),
('creditCardHolderPhone', 'card_holder_phone'),
('billingAddressStreet', 'billing_address_street'),
('billingAddressNumber', 'billing_address_number'),
('billingAddressComplement', 'billing_address_complement'),
('billingAddressDistrict', 'billing_address_district'),
('billingAddressPostalCode', 'billing_address_postal_code'),
('billingAddressCity', 'billing_address_city'),
('billingAddressState', 'billing_address_state'),
]
for key_to_set, key_to_get in credit_card_keys_map:
params[key_to_set] = self.credit_card.get(key_to_get)
if self.pre_approval:
params['preApprovalCharge'] = self.pre_approval.get('charge')
params['preApprovalName'] = self.pre_approval.get('name')
params['preApprovalDetails'] = self.pre_approval.get('details')
params['preApprovalAmountPerPayment'] = self.pre_approval.get(
'amount_per_payment')
params['preApprovalMaxAmountPerPayment'] = self.pre_approval.get(
'max_amount_per_payment')
params['preApprovalPeriod'] = self.pre_approval.get('period')
params['preApprovalMaxPaymentsPerPeriod'] = self.pre_approval.get(
'max_payments_per_period')
params['preApprovalMaxAmountPerPeriod'] = self.pre_approval.get(
'max_amount_per_period')
params['preApprovalInitialDate'] = self.pre_approval.get(
'initial_date')
params['preApprovalFinalDate'] = self.pre_approval.get(
'final_date')
params['preApprovalMaxTotalAmount'] = self.pre_approval.get(
'max_total_amount')
self.data.update(params)
self.clean_none_params()
def build_pre_approval_payment_params(self, **kwargs):
""" build a dict with params """
params = kwargs or {}
params['reference'] = self.reference
params['preApprovalCode'] = self.code
for i, item in enumerate(self.items, 1):
params['itemId%s' % i] = item.get('id')
params['itemDescription%s' % i] = item.get('description')
params['itemAmount%s' % i] = item.get('amount')
params['itemQuantity%s' % i] = item.get('quantity')
params['itemWeight%s' % i] = item.get('weight')
params['itemShippingCost%s' % i] = item.get('shipping_cost')
self.data.update(params)
self.clean_none_params()
def clean_none_params(self):
self.data = \
{k: v for k, v in self.data.items() if v or isinstance(v, bool)}
@property
def reference_prefix(self):
return self.config.REFERENCE_PREFIX or "%s"
@reference_prefix.setter
def reference_prefix(self, value):
self.config.REFERENCE_PREFIX = (value or "") + "%s"
@property
def reference(self):
return self.reference_prefix % self._reference
@reference.setter
def reference(self, value):
if not isinstance(value, str):
value = str(value)
if value.startswith(self.reference_prefix):
value = value[len(self.reference_prefix):]
self._reference = value
def get(self, url):
""" do a get transaction """
return requests.get(url, params=self.data, headers=self.config.HEADERS)
def post(self, url):
""" do a post request """
return requests.post(url, data=self.data, headers=self.config.HEADERS)
def checkout(self, transparent=False, **kwargs):
""" create a pagseguro checkout """
self.data['currency'] = self.config.CURRENCY
self.build_checkout_params(**kwargs)
if transparent:
response = self.post(url=self.config.TRANSPARENT_CHECKOUT_URL)
else:
response = self.post(url=self.config.CHECKOUT_URL)
return PagSeguroCheckoutResponse(response.content, config=self.config)
def transparent_checkout_session(self):
response = self.post(url=self.config.SESSION_CHECKOUT_URL)
return PagSeguroCheckoutSession(response.content,
config=self.config).session_id
def check_notification(self, code):
""" check a notification by its code """
response = self.get(url=self.config.NOTIFICATION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def check_pre_approval_notification(self, code):
""" check a notification by its code """
response = self.get(
url=self.config.PRE_APPROVAL_NOTIFICATION_URL % code)
return PagSeguroPreApprovalNotificationResponse(
response.content, self.config)
def pre_approval_ask_payment(self, **kwargs):
""" ask form a subscribe payment """
self.build_pre_approval_payment_params(**kwargs)
response = self.post(url=self.config.PRE_APPROVAL_PAYMENT_URL)
return PagSeguroPreApprovalPayment(response.content, self.config)
def pre_approval_cancel(self, code):
""" cancel a subscribe """
response = self.get(url=self.config.PRE_APPROVAL_CANCEL_URL % code)
return PagSeguroPreApprovalCancel(response.content, self.config)
def check_transaction(self, code):
""" check a transaction by its code """
response = self.get(url=self.config.TRANSACTION_URL % code)
return PagSeguroNotificationResponse(response.content, self.config)
def query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
""" query transaction by date range """
last_page = False
results = []
while last_page is False:
search_result = self._consume_query_transactions(
initial_date, final_date, page, max_results)
results.extend(search_result.transactions)
if search_result.current_page is None or \
search_result.total_pages is None or \
search_result.current_page == search_result.total_pages:
last_page = True
else:
page = search_result.current_page + 1
return results
def _consume_query_transactions(self, initial_date, final_date,
page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_TRANSACTION_URL)
return PagSeguroTransactionSearchResult(response.content, self.config)
def _consume_query_pre_approvals(self, initial_date, final_date, page=None,
max_results=None):
querystring = {
'initialDate': initial_date.strftime('%Y-%m-%dT%H:%M'),
'finalDate': final_date.strftime('%Y-%m-%dT%H:%M'),
'page': page,
'maxPageResults': max_results,
}
self.data.update(querystring)
self.clean_none_params()
response = self.get(url=self.config.QUERY_PRE_APPROVAL_URL)
return PagSeguroPreApprovalSearch(response.content, self.config)
def query_pre_approvals_by_code(self, code):
""" query pre-approvals by code """
result = self._consume_query_pre_approvals_by_code(code)
return result
def _consume_query_pre_approvals_by_code(self, code):
response = self.get(
url='%s/%s' % (self.config.QUERY_PRE_APPROVAL_URL, code)
)
return PagSeguroPreApproval(response.content, self.config)
def add_item(self, **kwargs):
self.items.append(kwargs)
|
rochacbruno/python-pagseguro
|
examples/flask/flask_seguro/controllers/main/__init__.py
|
add_to_cart
|
python
|
def add_to_cart(item_id):
cart = Cart(session['cart'])
if cart.change_item(item_id, 'add'):
session['cart'] = cart.to_dict()
return list_products()
|
Cart with Product
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/examples/flask/flask_seguro/controllers/main/__init__.py#L44-L49
| null |
# -*- coding: utf-8 -*-
""" Main Controllers """
from flask import session
from flask import jsonify
from flask import request
from flask import redirect
from flask import render_template
from flask import current_app as app
from pagseguro import PagSeguro
from flask_seguro.products import Products
from flask_seguro.cart import Cart
from .views import main
@main.before_request
def before_request():
if 'cart' not in session:
session['cart'] = Cart().to_dict()
@main.route('/')
def index():
""" Index Route """
return list_products()
@main.route('/cart')
def cart():
""" Cart Route """
return render_template('cart.jinja2', cart=session['cart'])
@main.route('/products/list')
def list_products():
""" Product list """
products = Products().get_all()
return render_template('products.jinja2',
products=products,
cart=session['cart'])
@main.route('/cart/add/<item_id>')
@main.route('/cart/remove/<item_id>')
def remove_from_cart(item_id):
cart = Cart(session['cart'])
if cart.change_item(item_id, 'remove'):
session['cart'] = cart.to_dict()
return list_products()
@main.route('/notification')
def notification_view(request):
notification_code = request.POST['notificationCode']
pg = PagSeguro(email=app.config['EMAIL'], token=app.config['TOKEN'])
pg.check_notification(notification_code)
# use the return of the function above to update the order
@main.route('/checkout', methods=['GET'])
def checkout_get():
return render_template('checkout.jinja2')
@main.route('/checkout', methods=['POST'])
def checkout_post():
for field in ['name', 'email', 'street', 'number', 'complement',
'district', 'postal_code', 'city', 'state']:
if not request.form.get(field, False):
return jsonify({'error_msg': 'Todos os campos são obrigatórios.'})
cart = Cart(session['cart'])
if len(cart.items) == 0:
return jsonify({'error_msg': 'Seu carrinho está vazio.'})
sender = {
"name": request.form.get("name"),
"email": request.form.get("email"),
}
shipping = {
"street": request.form.get("street"),
"number": request.form.get("number"),
"complement": request.form.get("complement"),
"district": request.form.get("district"),
"postal_code": request.form.get("postal_code"),
"city": request.form.get("city"),
"state": request.form.get("state"),
"country": 'BRA'
}
pagseguro = checkout_pg(sender, shipping, cart)
response = pagseguro.checkout()
return redirect(response.payment_url)
def checkout_pg(sender, shipping, cart):
pagseguro = PagSeguro(email=app.config['EMAIL'], token=app.config['TOKEN'])
pagseguro.sender = sender
shipping['type'] = pagseguro.SEDEX
pagseguro.shipping = shipping
pagseguro.extra_amount = "%.2f" % float(app.config['EXTRA_AMOUNT'])
pagseguro.redirect_url = app.config['REDIRECT_URL']
pagseguro.notification_url = app.config['NOTIFICATION_URL']
pagseguro.items = cart.items
for item in cart.items:
item['amount'] = "%.2f" % float(app.config['EXTRA_AMOUNT'])
return pagseguro
|
rochacbruno/python-pagseguro
|
examples/flask/flask_seguro/cart.py
|
Cart.to_dict
|
python
|
def to_dict(self):
return {
"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount
}
|
Attribute values to dict
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/examples/flask/flask_seguro/cart.py#L24-L32
| null |
class Cart(object):
""" The classe is responsable for cart in webpage """
def __init__(self, cart_dict=None):
""" Initializing class """
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def change_item(self, item_id, operation):
""" Remove items in cart """
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_p = [x for x in self.items if x['id'] == product['id']]
self.items.remove(cart_p[0])
self.update()
return True
else:
return False
def update(self):
""" Remove items in cart """
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
rochacbruno/python-pagseguro
|
examples/flask/flask_seguro/cart.py
|
Cart.change_item
|
python
|
def change_item(self, item_id, operation):
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_p = [x for x in self.items if x['id'] == product['id']]
self.items.remove(cart_p[0])
self.update()
return True
else:
return False
|
Remove items in cart
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/examples/flask/flask_seguro/cart.py#L34-L47
|
[
"def update(self):\n \"\"\" Remove items in cart \"\"\"\n\n subtotal = float(0)\n total = float(0)\n for product in self.items:\n subtotal += float(product[\"price\"])\n if subtotal > 0:\n total = subtotal + self.extra_amount\n self.subtotal = subtotal\n self.total = total\n"
] |
class Cart(object):
""" The classe is responsable for cart in webpage """
def __init__(self, cart_dict=None):
""" Initializing class """
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
""" Attribute values to dict """
return {
"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount
}
def update(self):
""" Remove items in cart """
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
rochacbruno/python-pagseguro
|
examples/flask/flask_seguro/cart.py
|
Cart.update
|
python
|
def update(self):
subtotal = float(0)
total = float(0)
for product in self.items:
subtotal += float(product["price"])
if subtotal > 0:
total = subtotal + self.extra_amount
self.subtotal = subtotal
self.total = total
|
Remove items in cart
|
train
|
https://github.com/rochacbruno/python-pagseguro/blob/18a9ca3301783cb323e838574b59f9ddffa9a593/examples/flask/flask_seguro/cart.py#L49-L59
| null |
class Cart(object):
""" The classe is responsable for cart in webpage """
def __init__(self, cart_dict=None):
""" Initializing class """
cart_dict = cart_dict or {}
if cart_dict == {}:
self.total = 0
self.subtotal = 0
self.items = []
else:
self.total = cart_dict["total"]
self.subtotal = cart_dict["subtotal"]
self.items = cart_dict["items"]
self.extra_amount = float(app.config['EXTRA_AMOUNT'])
def to_dict(self):
""" Attribute values to dict """
return {
"total": self.total,
"subtotal": self.subtotal,
"items": self.items,
"extra_amount": self.extra_amount
}
def change_item(self, item_id, operation):
""" Remove items in cart """
product = Products().get_one(item_id)
if product:
if operation == 'add':
self.items.append(product)
elif operation == 'remove':
cart_p = [x for x in self.items if x['id'] == product['id']]
self.items.remove(cart_p[0])
self.update()
return True
else:
return False
|
senaite/senaite.lims
|
src/senaite/lims/browser/bootstrap/viewlets.py
|
ViewletView.getViewletByName
|
python
|
def getViewletByName(self, name):
views = registration.getViews(IBrowserRequest)
for v in views:
if v.provided == IViewlet:
# Note that we might have conflicting BrowserView with the same
# name, thus we need to check for provided
if v.name == name:
return v
return None
|
Viewlets allow through-the-web customizations.
Through-the-web customization magic is managed by five.customerize.
We need to think of this when looking up viewlets.
@return: Viewlet registration object
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/bootstrap/viewlets.py#L162-L181
| null |
class ViewletView(BrowserView):
"""Expose arbitrary viewlets to traversing by name.
Example how to render plone.logo viewlet in arbitrary template code point::
<div tal:content="context/@@viewlets/plone.logo" />
https://docs.plone.org/develop/plone/views/viewlets.html#rendering-viewlet-by-name
"""
def __init__(self, context, request):
super(ViewletView, self).__init__(context, request)
self.context = context
self.request = request
def setupViewletByName(self, name):
""" Constructs a viewlet instance by its name.
Viewlet update() and render() method are not called.
@return: Viewlet instance of None if viewlet with name does not exist
"""
context = aq_inner(self.context)
request = self.request
# Perform viewlet regisration look-up
# from adapters registry
reg = self.getViewletByName(name)
if reg is None:
return None
# factory method is responsible for creating the viewlet instance
factory = reg.factory
# Create viewlet and put it to the acquisition chain
# Viewlet need initialization parameters: context, request, view
try:
viewlet = factory(context, request, self, None).__of__(context)
except TypeError:
# Bad constructor call parameters
raise RuntimeError(
"Unable to initialize viewlet {}. "
"Factory method {} call failed."
.format(name, str(factory)))
return viewlet
def __getitem__(self, name):
"""Allow travering intoviewlets by viewlet name.
"""
viewlet = self.setupViewletByName(name)
if viewlet is None:
raise NotFound("Viewlet {} not found".format(name))
viewlet.update()
return viewlet.render()
|
senaite/senaite.lims
|
src/senaite/lims/browser/bootstrap/viewlets.py
|
ViewletView.setupViewletByName
|
python
|
def setupViewletByName(self, name):
context = aq_inner(self.context)
request = self.request
# Perform viewlet regisration look-up
# from adapters registry
reg = self.getViewletByName(name)
if reg is None:
return None
# factory method is responsible for creating the viewlet instance
factory = reg.factory
# Create viewlet and put it to the acquisition chain
# Viewlet need initialization parameters: context, request, view
try:
viewlet = factory(context, request, self, None).__of__(context)
except TypeError:
# Bad constructor call parameters
raise RuntimeError(
"Unable to initialize viewlet {}. "
"Factory method {} call failed."
.format(name, str(factory)))
return viewlet
|
Constructs a viewlet instance by its name.
Viewlet update() and render() method are not called.
@return: Viewlet instance of None if viewlet with name does not exist
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/bootstrap/viewlets.py#L183-L213
| null |
class ViewletView(BrowserView):
"""Expose arbitrary viewlets to traversing by name.
Example how to render plone.logo viewlet in arbitrary template code point::
<div tal:content="context/@@viewlets/plone.logo" />
https://docs.plone.org/develop/plone/views/viewlets.html#rendering-viewlet-by-name
"""
def __init__(self, context, request):
super(ViewletView, self).__init__(context, request)
self.context = context
self.request = request
def getViewletByName(self, name):
""" Viewlets allow through-the-web customizations.
Through-the-web customization magic is managed by five.customerize.
We need to think of this when looking up viewlets.
@return: Viewlet registration object
"""
views = registration.getViews(IBrowserRequest)
for v in views:
if v.provided == IViewlet:
# Note that we might have conflicting BrowserView with the same
# name, thus we need to check for provided
if v.name == name:
return v
return None
def __getitem__(self, name):
"""Allow travering intoviewlets by viewlet name.
"""
viewlet = self.setupViewletByName(name)
if viewlet is None:
raise NotFound("Viewlet {} not found".format(name))
viewlet.update()
return viewlet.render()
|
senaite/senaite.lims
|
src/senaite/lims/setuphandlers.py
|
setup_handler
|
python
|
def setup_handler(context):
if context.readDataFile('senaite.lims.txt') is None:
return
logger.info("SENAITE setup handler [BEGIN]")
portal = context.getSite() # noqa
# Custom setup handlers
setup_html_filter(portal)
logger.info("SENAITE setup handler [DONE]")
|
Generic setup handler
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/setuphandlers.py#L30-L43
|
[
"def setup_html_filter(portal):\n \"\"\"Setup HTML filtering for resultsinterpretations\n \"\"\"\n logger.info(\"*** Setup HTML Filter ***\")\n # bypass the broken API from portal_transforms\n adapter = IFilterSchema(portal)\n style_whitelist = adapter.style_whitelist\n for style in ALLOWED_STYLES:\n logger.info(\"Allow style '{}'\".format(style))\n if style not in style_whitelist:\n style_whitelist.append(style)\n adapter.style_whitelist = style_whitelist\n"
] |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from plone.app.controlpanel.filter import IFilterSchema
from senaite.lims import logger
ALLOWED_STYLES = [
"color",
"background-color"
]
def setup_html_filter(portal):
"""Setup HTML filtering for resultsinterpretations
"""
logger.info("*** Setup HTML Filter ***")
# bypass the broken API from portal_transforms
adapter = IFilterSchema(portal)
style_whitelist = adapter.style_whitelist
for style in ALLOWED_STYLES:
logger.info("Allow style '{}'".format(style))
if style not in style_whitelist:
style_whitelist.append(style)
adapter.style_whitelist = style_whitelist
def pre_install(portal_setup):
"""Runs berfore the first import step of the *default* profile
This handler is registered as a *pre_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS pre-install handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:default"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
# Only install the core once!
qi = portal.portal_quickinstaller
if not qi.isProductInstalled("bika.lims"):
portal_setup.runAllImportStepsFromProfile("profile-bika.lims:default")
logger.info("SENAITE LIMS pre-install handler [DONE]")
def post_install(portal_setup):
"""Runs after the last import step of the *default* profile
This handler is registered as a *post_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS post-install handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:default"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
logger.info("SENAITE LIMS post-install handler [DONE]")
def post_uninstall(portal_setup):
"""Runs after the last import step of the *uninstall* profile
This handler is registered as a *post_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS uninstall handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:uninstall"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
logger.info("SENAITE LIMS uninstall handler [DONE]")
|
senaite/senaite.lims
|
src/senaite/lims/setuphandlers.py
|
setup_html_filter
|
python
|
def setup_html_filter(portal):
logger.info("*** Setup HTML Filter ***")
# bypass the broken API from portal_transforms
adapter = IFilterSchema(portal)
style_whitelist = adapter.style_whitelist
for style in ALLOWED_STYLES:
logger.info("Allow style '{}'".format(style))
if style not in style_whitelist:
style_whitelist.append(style)
adapter.style_whitelist = style_whitelist
|
Setup HTML filtering for resultsinterpretations
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/setuphandlers.py#L46-L57
| null |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from plone.app.controlpanel.filter import IFilterSchema
from senaite.lims import logger
ALLOWED_STYLES = [
"color",
"background-color"
]
def setup_handler(context):
"""Generic setup handler
"""
if context.readDataFile('senaite.lims.txt') is None:
return
logger.info("SENAITE setup handler [BEGIN]")
portal = context.getSite() # noqa
# Custom setup handlers
setup_html_filter(portal)
logger.info("SENAITE setup handler [DONE]")
def pre_install(portal_setup):
"""Runs berfore the first import step of the *default* profile
This handler is registered as a *pre_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS pre-install handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:default"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
# Only install the core once!
qi = portal.portal_quickinstaller
if not qi.isProductInstalled("bika.lims"):
portal_setup.runAllImportStepsFromProfile("profile-bika.lims:default")
logger.info("SENAITE LIMS pre-install handler [DONE]")
def post_install(portal_setup):
"""Runs after the last import step of the *default* profile
This handler is registered as a *post_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS post-install handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:default"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
logger.info("SENAITE LIMS post-install handler [DONE]")
def post_uninstall(portal_setup):
"""Runs after the last import step of the *uninstall* profile
This handler is registered as a *post_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS uninstall handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:uninstall"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
logger.info("SENAITE LIMS uninstall handler [DONE]")
|
senaite/senaite.lims
|
src/senaite/lims/setuphandlers.py
|
pre_install
|
python
|
def pre_install(portal_setup):
logger.info("SENAITE LIMS pre-install handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:default"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
# Only install the core once!
qi = portal.portal_quickinstaller
if not qi.isProductInstalled("bika.lims"):
portal_setup.runAllImportStepsFromProfile("profile-bika.lims:default")
logger.info("SENAITE LIMS pre-install handler [DONE]")
|
Runs berfore the first import step of the *default* profile
This handler is registered as a *pre_handler* in the generic setup profile
:param portal_setup: SetupTool
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/setuphandlers.py#L60-L79
| null |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from plone.app.controlpanel.filter import IFilterSchema
from senaite.lims import logger
ALLOWED_STYLES = [
"color",
"background-color"
]
def setup_handler(context):
"""Generic setup handler
"""
if context.readDataFile('senaite.lims.txt') is None:
return
logger.info("SENAITE setup handler [BEGIN]")
portal = context.getSite() # noqa
# Custom setup handlers
setup_html_filter(portal)
logger.info("SENAITE setup handler [DONE]")
def setup_html_filter(portal):
"""Setup HTML filtering for resultsinterpretations
"""
logger.info("*** Setup HTML Filter ***")
# bypass the broken API from portal_transforms
adapter = IFilterSchema(portal)
style_whitelist = adapter.style_whitelist
for style in ALLOWED_STYLES:
logger.info("Allow style '{}'".format(style))
if style not in style_whitelist:
style_whitelist.append(style)
adapter.style_whitelist = style_whitelist
def post_install(portal_setup):
"""Runs after the last import step of the *default* profile
This handler is registered as a *post_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS post-install handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:default"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
logger.info("SENAITE LIMS post-install handler [DONE]")
def post_uninstall(portal_setup):
"""Runs after the last import step of the *uninstall* profile
This handler is registered as a *post_handler* in the generic setup profile
:param portal_setup: SetupTool
"""
logger.info("SENAITE LIMS uninstall handler [BEGIN]")
# https://docs.plone.org/develop/addons/components/genericsetup.html#custom-installer-code-setuphandlers-py
profile_id = "profile-senaite.lims:uninstall"
context = portal_setup._getImportContext(profile_id)
portal = context.getSite() # noqa
logger.info("SENAITE LIMS uninstall handler [DONE]")
|
senaite/senaite.lims
|
src/senaite/lims/upgrades/handlers.py
|
to_1000
|
python
|
def to_1000(portal_setup):
logger.info("Run all import steps from SENAITE LIMS ...")
context = portal_setup._getImportContext(PROFILE_ID)
portal = context.getSite()
setup_html_filter(portal)
portal_setup.runAllImportStepsFromProfile(PROFILE_ID)
logger.info("Run all import steps from SENAITE LIMS [DONE]")
|
Initial version to 1000
:param portal_setup: The portal_setup tool
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/upgrades/handlers.py#L27-L38
|
[
"def setup_html_filter(portal):\n \"\"\"Setup HTML filtering for resultsinterpretations\n \"\"\"\n logger.info(\"*** Setup HTML Filter ***\")\n # bypass the broken API from portal_transforms\n adapter = IFilterSchema(portal)\n style_whitelist = adapter.style_whitelist\n for style in ALLOWED_STYLES:\n logger.info(\"Allow style '{}'\".format(style))\n if style not in style_whitelist:\n style_whitelist.append(style)\n adapter.style_whitelist = style_whitelist\n"
] |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from senaite.impress import logger
from senaite.lims.setuphandlers import setup_html_filter
PROFILE_ID = "profile-senaite.lims:default"
|
senaite/senaite.lims
|
src/senaite/lims/browser/spotlight/jsonapi.py
|
spotlight_search_route
|
python
|
def spotlight_search_route(context, request):
catalogs = [
CATALOG_ANALYSIS_REQUEST_LISTING,
"portal_catalog",
"bika_setup_catalog",
"bika_catalog",
"bika_catalog_worksheet_listing"
]
search_results = []
for catalog in catalogs:
search_results.extend(search(catalog=catalog))
# extract the data from all the brains
items = map(get_brain_info, search_results)
return {
"count": len(items),
"items": sorted(items, key=itemgetter("title")),
}
|
The spotlight search route
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/spotlight/jsonapi.py#L31-L52
|
[
"def search(query=None, catalog=None):\n \"\"\"Search\n \"\"\"\n if query is None:\n query = make_query(catalog)\n if query is None:\n return []\n return api.search(query, catalog=catalog)\n"
] |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from operator import itemgetter
from bika.lims.catalog.analysisrequest_catalog import \
CATALOG_ANALYSIS_REQUEST_LISTING
from plone.memoize import forever
from senaite import api
from senaite.jsonapi import add_route
@add_route("/spotlight/search", "senaite.lims.spotlight", methods=["GET"])
def get_brain_info(brain):
"""Extract the brain info
"""
icon = api.get_icon(brain)
# avoid 404 errors with these guys
if "document_icon.gif" in icon:
icon = ""
id = api.get_id(brain)
url = api.get_url(brain)
title = api.get_title(brain)
description = api.get_description(brain)
parent = api.get_parent(brain)
parent_title = api.get_title(parent)
parent_url = api.get_url(parent)
return {
"id": id,
"title": title,
"title_or_id": title or id,
"description": description,
"url": url,
"parent_title": parent_title,
"parent_url": parent_url,
"icon": icon,
}
def search(query=None, catalog=None):
"""Search
"""
if query is None:
query = make_query(catalog)
if query is None:
return []
return api.search(query, catalog=catalog)
@forever.memoize
def get_search_index_for(catalog):
"""Returns the search index to query
"""
searchable_text_index = "SearchableText"
listing_searchable_text_index = "listing_searchable_text"
if catalog == CATALOG_ANALYSIS_REQUEST_LISTING:
tool = api.get_tool(catalog)
indexes = tool.indexes()
if listing_searchable_text_index in indexes:
return listing_searchable_text_index
return searchable_text_index
def make_query(catalog):
"""A function to prepare a query
"""
query = {}
request = api.get_request()
index = get_search_index_for(catalog)
limit = request.form.get("limit")
q = request.form.get("q")
if len(q) > 0:
query[index] = q + "*"
else:
return None
portal_type = request.form.get("portal_type")
if portal_type:
if not isinstance(portal_type, list):
portal_type = [portal_type]
query["portal_type"] = portal_type
if limit and limit.isdigit():
query["sort_limit"] = int(limit)
return query
|
senaite/senaite.lims
|
src/senaite/lims/browser/spotlight/jsonapi.py
|
get_brain_info
|
python
|
def get_brain_info(brain):
icon = api.get_icon(brain)
# avoid 404 errors with these guys
if "document_icon.gif" in icon:
icon = ""
id = api.get_id(brain)
url = api.get_url(brain)
title = api.get_title(brain)
description = api.get_description(brain)
parent = api.get_parent(brain)
parent_title = api.get_title(parent)
parent_url = api.get_url(parent)
return {
"id": id,
"title": title,
"title_or_id": title or id,
"description": description,
"url": url,
"parent_title": parent_title,
"parent_url": parent_url,
"icon": icon,
}
|
Extract the brain info
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/spotlight/jsonapi.py#L55-L80
| null |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from operator import itemgetter
from bika.lims.catalog.analysisrequest_catalog import \
CATALOG_ANALYSIS_REQUEST_LISTING
from plone.memoize import forever
from senaite import api
from senaite.jsonapi import add_route
@add_route("/spotlight/search", "senaite.lims.spotlight", methods=["GET"])
def spotlight_search_route(context, request):
"""The spotlight search route
"""
catalogs = [
CATALOG_ANALYSIS_REQUEST_LISTING,
"portal_catalog",
"bika_setup_catalog",
"bika_catalog",
"bika_catalog_worksheet_listing"
]
search_results = []
for catalog in catalogs:
search_results.extend(search(catalog=catalog))
# extract the data from all the brains
items = map(get_brain_info, search_results)
return {
"count": len(items),
"items": sorted(items, key=itemgetter("title")),
}
def search(query=None, catalog=None):
"""Search
"""
if query is None:
query = make_query(catalog)
if query is None:
return []
return api.search(query, catalog=catalog)
@forever.memoize
def get_search_index_for(catalog):
"""Returns the search index to query
"""
searchable_text_index = "SearchableText"
listing_searchable_text_index = "listing_searchable_text"
if catalog == CATALOG_ANALYSIS_REQUEST_LISTING:
tool = api.get_tool(catalog)
indexes = tool.indexes()
if listing_searchable_text_index in indexes:
return listing_searchable_text_index
return searchable_text_index
def make_query(catalog):
"""A function to prepare a query
"""
query = {}
request = api.get_request()
index = get_search_index_for(catalog)
limit = request.form.get("limit")
q = request.form.get("q")
if len(q) > 0:
query[index] = q + "*"
else:
return None
portal_type = request.form.get("portal_type")
if portal_type:
if not isinstance(portal_type, list):
portal_type = [portal_type]
query["portal_type"] = portal_type
if limit and limit.isdigit():
query["sort_limit"] = int(limit)
return query
|
senaite/senaite.lims
|
src/senaite/lims/browser/spotlight/jsonapi.py
|
search
|
python
|
def search(query=None, catalog=None):
if query is None:
query = make_query(catalog)
if query is None:
return []
return api.search(query, catalog=catalog)
|
Search
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/spotlight/jsonapi.py#L83-L90
|
[
"def make_query(catalog):\n \"\"\"A function to prepare a query\n \"\"\"\n query = {}\n request = api.get_request()\n index = get_search_index_for(catalog)\n limit = request.form.get(\"limit\")\n\n q = request.form.get(\"q\")\n if len(q) > 0:\n query[index] = q + \"*\"\n else:\n return None\n\n portal_type = request.form.get(\"portal_type\")\n if portal_type:\n if not isinstance(portal_type, list):\n portal_type = [portal_type]\n query[\"portal_type\"] = portal_type\n\n if limit and limit.isdigit():\n query[\"sort_limit\"] = int(limit)\n\n return query\n"
] |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from operator import itemgetter
from bika.lims.catalog.analysisrequest_catalog import \
CATALOG_ANALYSIS_REQUEST_LISTING
from plone.memoize import forever
from senaite import api
from senaite.jsonapi import add_route
@add_route("/spotlight/search", "senaite.lims.spotlight", methods=["GET"])
def spotlight_search_route(context, request):
"""The spotlight search route
"""
catalogs = [
CATALOG_ANALYSIS_REQUEST_LISTING,
"portal_catalog",
"bika_setup_catalog",
"bika_catalog",
"bika_catalog_worksheet_listing"
]
search_results = []
for catalog in catalogs:
search_results.extend(search(catalog=catalog))
# extract the data from all the brains
items = map(get_brain_info, search_results)
return {
"count": len(items),
"items": sorted(items, key=itemgetter("title")),
}
def get_brain_info(brain):
"""Extract the brain info
"""
icon = api.get_icon(brain)
# avoid 404 errors with these guys
if "document_icon.gif" in icon:
icon = ""
id = api.get_id(brain)
url = api.get_url(brain)
title = api.get_title(brain)
description = api.get_description(brain)
parent = api.get_parent(brain)
parent_title = api.get_title(parent)
parent_url = api.get_url(parent)
return {
"id": id,
"title": title,
"title_or_id": title or id,
"description": description,
"url": url,
"parent_title": parent_title,
"parent_url": parent_url,
"icon": icon,
}
@forever.memoize
def get_search_index_for(catalog):
"""Returns the search index to query
"""
searchable_text_index = "SearchableText"
listing_searchable_text_index = "listing_searchable_text"
if catalog == CATALOG_ANALYSIS_REQUEST_LISTING:
tool = api.get_tool(catalog)
indexes = tool.indexes()
if listing_searchable_text_index in indexes:
return listing_searchable_text_index
return searchable_text_index
def make_query(catalog):
"""A function to prepare a query
"""
query = {}
request = api.get_request()
index = get_search_index_for(catalog)
limit = request.form.get("limit")
q = request.form.get("q")
if len(q) > 0:
query[index] = q + "*"
else:
return None
portal_type = request.form.get("portal_type")
if portal_type:
if not isinstance(portal_type, list):
portal_type = [portal_type]
query["portal_type"] = portal_type
if limit and limit.isdigit():
query["sort_limit"] = int(limit)
return query
|
senaite/senaite.lims
|
src/senaite/lims/browser/spotlight/jsonapi.py
|
get_search_index_for
|
python
|
def get_search_index_for(catalog):
searchable_text_index = "SearchableText"
listing_searchable_text_index = "listing_searchable_text"
if catalog == CATALOG_ANALYSIS_REQUEST_LISTING:
tool = api.get_tool(catalog)
indexes = tool.indexes()
if listing_searchable_text_index in indexes:
return listing_searchable_text_index
return searchable_text_index
|
Returns the search index to query
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/spotlight/jsonapi.py#L94-L106
| null |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from operator import itemgetter
from bika.lims.catalog.analysisrequest_catalog import \
CATALOG_ANALYSIS_REQUEST_LISTING
from plone.memoize import forever
from senaite import api
from senaite.jsonapi import add_route
@add_route("/spotlight/search", "senaite.lims.spotlight", methods=["GET"])
def spotlight_search_route(context, request):
"""The spotlight search route
"""
catalogs = [
CATALOG_ANALYSIS_REQUEST_LISTING,
"portal_catalog",
"bika_setup_catalog",
"bika_catalog",
"bika_catalog_worksheet_listing"
]
search_results = []
for catalog in catalogs:
search_results.extend(search(catalog=catalog))
# extract the data from all the brains
items = map(get_brain_info, search_results)
return {
"count": len(items),
"items": sorted(items, key=itemgetter("title")),
}
def get_brain_info(brain):
"""Extract the brain info
"""
icon = api.get_icon(brain)
# avoid 404 errors with these guys
if "document_icon.gif" in icon:
icon = ""
id = api.get_id(brain)
url = api.get_url(brain)
title = api.get_title(brain)
description = api.get_description(brain)
parent = api.get_parent(brain)
parent_title = api.get_title(parent)
parent_url = api.get_url(parent)
return {
"id": id,
"title": title,
"title_or_id": title or id,
"description": description,
"url": url,
"parent_title": parent_title,
"parent_url": parent_url,
"icon": icon,
}
def search(query=None, catalog=None):
"""Search
"""
if query is None:
query = make_query(catalog)
if query is None:
return []
return api.search(query, catalog=catalog)
@forever.memoize
def make_query(catalog):
"""A function to prepare a query
"""
query = {}
request = api.get_request()
index = get_search_index_for(catalog)
limit = request.form.get("limit")
q = request.form.get("q")
if len(q) > 0:
query[index] = q + "*"
else:
return None
portal_type = request.form.get("portal_type")
if portal_type:
if not isinstance(portal_type, list):
portal_type = [portal_type]
query["portal_type"] = portal_type
if limit and limit.isdigit():
query["sort_limit"] = int(limit)
return query
|
senaite/senaite.lims
|
src/senaite/lims/browser/spotlight/jsonapi.py
|
make_query
|
python
|
def make_query(catalog):
query = {}
request = api.get_request()
index = get_search_index_for(catalog)
limit = request.form.get("limit")
q = request.form.get("q")
if len(q) > 0:
query[index] = q + "*"
else:
return None
portal_type = request.form.get("portal_type")
if portal_type:
if not isinstance(portal_type, list):
portal_type = [portal_type]
query["portal_type"] = portal_type
if limit and limit.isdigit():
query["sort_limit"] = int(limit)
return query
|
A function to prepare a query
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/spotlight/jsonapi.py#L109-L132
| null |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from operator import itemgetter
from bika.lims.catalog.analysisrequest_catalog import \
CATALOG_ANALYSIS_REQUEST_LISTING
from plone.memoize import forever
from senaite import api
from senaite.jsonapi import add_route
@add_route("/spotlight/search", "senaite.lims.spotlight", methods=["GET"])
def spotlight_search_route(context, request):
"""The spotlight search route
"""
catalogs = [
CATALOG_ANALYSIS_REQUEST_LISTING,
"portal_catalog",
"bika_setup_catalog",
"bika_catalog",
"bika_catalog_worksheet_listing"
]
search_results = []
for catalog in catalogs:
search_results.extend(search(catalog=catalog))
# extract the data from all the brains
items = map(get_brain_info, search_results)
return {
"count": len(items),
"items": sorted(items, key=itemgetter("title")),
}
def get_brain_info(brain):
"""Extract the brain info
"""
icon = api.get_icon(brain)
# avoid 404 errors with these guys
if "document_icon.gif" in icon:
icon = ""
id = api.get_id(brain)
url = api.get_url(brain)
title = api.get_title(brain)
description = api.get_description(brain)
parent = api.get_parent(brain)
parent_title = api.get_title(parent)
parent_url = api.get_url(parent)
return {
"id": id,
"title": title,
"title_or_id": title or id,
"description": description,
"url": url,
"parent_title": parent_title,
"parent_url": parent_url,
"icon": icon,
}
def search(query=None, catalog=None):
"""Search
"""
if query is None:
query = make_query(catalog)
if query is None:
return []
return api.search(query, catalog=catalog)
@forever.memoize
def get_search_index_for(catalog):
"""Returns the search index to query
"""
searchable_text_index = "SearchableText"
listing_searchable_text_index = "listing_searchable_text"
if catalog == CATALOG_ANALYSIS_REQUEST_LISTING:
tool = api.get_tool(catalog)
indexes = tool.indexes()
if listing_searchable_text_index in indexes:
return listing_searchable_text_index
return searchable_text_index
|
senaite/senaite.lims
|
src/senaite/lims/browser/bootstrap/views.py
|
icon_cache_key
|
python
|
def icon_cache_key(method, self, brain_or_object):
url = api.get_url(brain_or_object)
modified = api.get_modification_date(brain_or_object).millis()
key = "{}?modified={}".format(url, modified)
logger.debug("Generated Cache Key: {}".format(key))
return key
|
Generates a cache key for the icon lookup
Includes the virtual URL to handle multiple HTTP/HTTPS domains
Example: http://senaite.local/clients?modified=1512033263370
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/bootstrap/views.py#L33-L43
| null |
# -*- coding: utf-8 -*-
#
# This file is part of SENAITE.LIMS.
#
# SENAITE.LIMS is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright 2018-2019 by it's authors.
# Some rights reserved, see README and LICENSE.
from bika.lims import api
from plone.app.controlpanel.overview import OverviewControlPanel
from plone.memoize.ram import cache
from Products.Five import BrowserView
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from senaite.lims import logger
from zope.component import getMultiAdapter
from zope.interface import implements
from .interfaces import IBootstrapView
class SenaiteOverviewControlPanel(OverviewControlPanel):
template = ViewPageTemplateFile(
"templates/plone.app.controlpanel.overview.pt")
class BootstrapView(BrowserView):
"""Twitter Bootstrap helper view for SENAITE LIMS
"""
implements(IBootstrapView)
def __init__(self, context, request):
super(BrowserView, self).__init__(context, request)
@cache(icon_cache_key)
def get_icon_for(self, brain_or_object):
"""Get the navigation portlet icon for the brain or object
The cache key ensures that the lookup is done only once per domain name
"""
portal_types = api.get_tool("portal_types")
fti = portal_types.getTypeInfo(api.get_portal_type(brain_or_object))
icon = fti.getIcon()
if not icon:
return ""
# Always try to get the big icon for high-res displays
icon_big = icon.replace(".png", "_big.png")
# fall back to a default icon if the looked up icon does not exist
if self.context.restrictedTraverse(icon_big, None) is None:
icon_big = None
portal_url = api.get_url(api.get_portal())
title = api.get_title(brain_or_object)
html_tag = "<img title='{}' src='{}/{}' width='16' />".format(
title, portal_url, icon_big or icon)
logger.info("Generated Icon Tag for {}: {}".format(
api.get_path(brain_or_object), html_tag))
return html_tag
def getViewportValues(self, view=None):
"""Determine the value of the viewport meta-tag
"""
values = {
'width': 'device-width',
'initial-scale': '1.0',
}
return ','.join('%s=%s' % (k, v) for k, v in values.items())
def getColumnsClasses(self, view=None):
"""Determine whether a column should be shown. The left column is
called plone.leftcolumn; the right column is called
plone.rightcolumn.
"""
plone_view = getMultiAdapter(
(self.context, self.request), name=u'plone')
portal_state = getMultiAdapter(
(self.context, self.request), name=u'plone_portal_state')
sl = plone_view.have_portlets('plone.leftcolumn', view=view)
sr = plone_view.have_portlets('plone.rightcolumn', view=view)
isRTL = portal_state.is_rtl()
# pre-fill dictionary
columns = dict(one="", content="", two="")
if not sl and not sr:
# we don't have columns, thus conten takes the whole width
columns['content'] = "col-md-12"
elif sl and sr:
# In case we have both columns, content takes 50% of the whole
# width and the rest 50% is spread between the columns
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-8"
columns['two'] = "col-xs-12 col-md-2"
elif (sr and not sl) and not isRTL:
# We have right column and we are NOT in RTL language
columns['content'] = "col-xs-12 col-md-10"
columns['two'] = "col-xs-12 col-md-2"
elif (sl and not sr) and isRTL:
# We have left column and we are in RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
elif (sl and not sr) and not isRTL:
# We have left column and we are in NOT RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
# # append cell to each css-string
# for key, value in columns.items():
# columns[key] = "cell " + value
return columns
|
senaite/senaite.lims
|
src/senaite/lims/browser/bootstrap/views.py
|
BootstrapView.get_icon_for
|
python
|
def get_icon_for(self, brain_or_object):
portal_types = api.get_tool("portal_types")
fti = portal_types.getTypeInfo(api.get_portal_type(brain_or_object))
icon = fti.getIcon()
if not icon:
return ""
# Always try to get the big icon for high-res displays
icon_big = icon.replace(".png", "_big.png")
# fall back to a default icon if the looked up icon does not exist
if self.context.restrictedTraverse(icon_big, None) is None:
icon_big = None
portal_url = api.get_url(api.get_portal())
title = api.get_title(brain_or_object)
html_tag = "<img title='{}' src='{}/{}' width='16' />".format(
title, portal_url, icon_big or icon)
logger.info("Generated Icon Tag for {}: {}".format(
api.get_path(brain_or_object), html_tag))
return html_tag
|
Get the navigation portlet icon for the brain or object
The cache key ensures that the lookup is done only once per domain name
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/bootstrap/views.py#L60-L81
| null |
class BootstrapView(BrowserView):
"""Twitter Bootstrap helper view for SENAITE LIMS
"""
implements(IBootstrapView)
def __init__(self, context, request):
super(BrowserView, self).__init__(context, request)
@cache(icon_cache_key)
def getViewportValues(self, view=None):
"""Determine the value of the viewport meta-tag
"""
values = {
'width': 'device-width',
'initial-scale': '1.0',
}
return ','.join('%s=%s' % (k, v) for k, v in values.items())
def getColumnsClasses(self, view=None):
"""Determine whether a column should be shown. The left column is
called plone.leftcolumn; the right column is called
plone.rightcolumn.
"""
plone_view = getMultiAdapter(
(self.context, self.request), name=u'plone')
portal_state = getMultiAdapter(
(self.context, self.request), name=u'plone_portal_state')
sl = plone_view.have_portlets('plone.leftcolumn', view=view)
sr = plone_view.have_portlets('plone.rightcolumn', view=view)
isRTL = portal_state.is_rtl()
# pre-fill dictionary
columns = dict(one="", content="", two="")
if not sl and not sr:
# we don't have columns, thus conten takes the whole width
columns['content'] = "col-md-12"
elif sl and sr:
# In case we have both columns, content takes 50% of the whole
# width and the rest 50% is spread between the columns
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-8"
columns['two'] = "col-xs-12 col-md-2"
elif (sr and not sl) and not isRTL:
# We have right column and we are NOT in RTL language
columns['content'] = "col-xs-12 col-md-10"
columns['two'] = "col-xs-12 col-md-2"
elif (sl and not sr) and isRTL:
# We have left column and we are in RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
elif (sl and not sr) and not isRTL:
# We have left column and we are in NOT RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
# # append cell to each css-string
# for key, value in columns.items():
# columns[key] = "cell " + value
return columns
|
senaite/senaite.lims
|
src/senaite/lims/browser/bootstrap/views.py
|
BootstrapView.getViewportValues
|
python
|
def getViewportValues(self, view=None):
values = {
'width': 'device-width',
'initial-scale': '1.0',
}
return ','.join('%s=%s' % (k, v) for k, v in values.items())
|
Determine the value of the viewport meta-tag
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/bootstrap/views.py#L83-L91
| null |
class BootstrapView(BrowserView):
"""Twitter Bootstrap helper view for SENAITE LIMS
"""
implements(IBootstrapView)
def __init__(self, context, request):
super(BrowserView, self).__init__(context, request)
@cache(icon_cache_key)
def get_icon_for(self, brain_or_object):
"""Get the navigation portlet icon for the brain or object
The cache key ensures that the lookup is done only once per domain name
"""
portal_types = api.get_tool("portal_types")
fti = portal_types.getTypeInfo(api.get_portal_type(brain_or_object))
icon = fti.getIcon()
if not icon:
return ""
# Always try to get the big icon for high-res displays
icon_big = icon.replace(".png", "_big.png")
# fall back to a default icon if the looked up icon does not exist
if self.context.restrictedTraverse(icon_big, None) is None:
icon_big = None
portal_url = api.get_url(api.get_portal())
title = api.get_title(brain_or_object)
html_tag = "<img title='{}' src='{}/{}' width='16' />".format(
title, portal_url, icon_big or icon)
logger.info("Generated Icon Tag for {}: {}".format(
api.get_path(brain_or_object), html_tag))
return html_tag
def getColumnsClasses(self, view=None):
"""Determine whether a column should be shown. The left column is
called plone.leftcolumn; the right column is called
plone.rightcolumn.
"""
plone_view = getMultiAdapter(
(self.context, self.request), name=u'plone')
portal_state = getMultiAdapter(
(self.context, self.request), name=u'plone_portal_state')
sl = plone_view.have_portlets('plone.leftcolumn', view=view)
sr = plone_view.have_portlets('plone.rightcolumn', view=view)
isRTL = portal_state.is_rtl()
# pre-fill dictionary
columns = dict(one="", content="", two="")
if not sl and not sr:
# we don't have columns, thus conten takes the whole width
columns['content'] = "col-md-12"
elif sl and sr:
# In case we have both columns, content takes 50% of the whole
# width and the rest 50% is spread between the columns
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-8"
columns['two'] = "col-xs-12 col-md-2"
elif (sr and not sl) and not isRTL:
# We have right column and we are NOT in RTL language
columns['content'] = "col-xs-12 col-md-10"
columns['two'] = "col-xs-12 col-md-2"
elif (sl and not sr) and isRTL:
# We have left column and we are in RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
elif (sl and not sr) and not isRTL:
# We have left column and we are in NOT RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
# # append cell to each css-string
# for key, value in columns.items():
# columns[key] = "cell " + value
return columns
|
senaite/senaite.lims
|
src/senaite/lims/browser/bootstrap/views.py
|
BootstrapView.getColumnsClasses
|
python
|
def getColumnsClasses(self, view=None):
plone_view = getMultiAdapter(
(self.context, self.request), name=u'plone')
portal_state = getMultiAdapter(
(self.context, self.request), name=u'plone_portal_state')
sl = plone_view.have_portlets('plone.leftcolumn', view=view)
sr = plone_view.have_portlets('plone.rightcolumn', view=view)
isRTL = portal_state.is_rtl()
# pre-fill dictionary
columns = dict(one="", content="", two="")
if not sl and not sr:
# we don't have columns, thus conten takes the whole width
columns['content'] = "col-md-12"
elif sl and sr:
# In case we have both columns, content takes 50% of the whole
# width and the rest 50% is spread between the columns
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-8"
columns['two'] = "col-xs-12 col-md-2"
elif (sr and not sl) and not isRTL:
# We have right column and we are NOT in RTL language
columns['content'] = "col-xs-12 col-md-10"
columns['two'] = "col-xs-12 col-md-2"
elif (sl and not sr) and isRTL:
# We have left column and we are in RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
elif (sl and not sr) and not isRTL:
# We have left column and we are in NOT RTL language
columns['one'] = "col-xs-12 col-md-2"
columns['content'] = "col-xs-12 col-md-10"
# # append cell to each css-string
# for key, value in columns.items():
# columns[key] = "cell " + value
return columns
|
Determine whether a column should be shown. The left column is
called plone.leftcolumn; the right column is called
plone.rightcolumn.
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/bootstrap/views.py#L93-L142
| null |
class BootstrapView(BrowserView):
"""Twitter Bootstrap helper view for SENAITE LIMS
"""
implements(IBootstrapView)
def __init__(self, context, request):
super(BrowserView, self).__init__(context, request)
@cache(icon_cache_key)
def get_icon_for(self, brain_or_object):
"""Get the navigation portlet icon for the brain or object
The cache key ensures that the lookup is done only once per domain name
"""
portal_types = api.get_tool("portal_types")
fti = portal_types.getTypeInfo(api.get_portal_type(brain_or_object))
icon = fti.getIcon()
if not icon:
return ""
# Always try to get the big icon for high-res displays
icon_big = icon.replace(".png", "_big.png")
# fall back to a default icon if the looked up icon does not exist
if self.context.restrictedTraverse(icon_big, None) is None:
icon_big = None
portal_url = api.get_url(api.get_portal())
title = api.get_title(brain_or_object)
html_tag = "<img title='{}' src='{}/{}' width='16' />".format(
title, portal_url, icon_big or icon)
logger.info("Generated Icon Tag for {}: {}".format(
api.get_path(brain_or_object), html_tag))
return html_tag
def getViewportValues(self, view=None):
"""Determine the value of the viewport meta-tag
"""
values = {
'width': 'device-width',
'initial-scale': '1.0',
}
return ','.join('%s=%s' % (k, v) for k, v in values.items())
|
senaite/senaite.lims
|
src/senaite/lims/browser/controlpanel/views/setupview.py
|
SetupView.get_icon_url
|
python
|
def get_icon_url(self, brain):
icon_url = api.get_icon(brain, html_tag=False)
url, icon = icon_url.rsplit("/", 1)
relative_url = url.lstrip(self.portal.absolute_url())
name, ext = os.path.splitext(icon)
# big icons endwith _big
if not name.endswith("_big"):
icon = "{}_big{}".format(name, ext)
icon_big_url = "/".join([relative_url, icon])
# fall back to a default icon if the looked up icon does not exist
if self.context.restrictedTraverse(icon_big_url, None) is None:
icon_big_url = "++resource++senaite.lims.images/gears.png"
return icon_big_url
|
Returns the (big) icon URL for the given catalog brain
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/controlpanel/views/setupview.py#L63-L81
| null |
class SetupView(BrowserView):
"""SENAITE LIMS Setup View
"""
template = ViewPageTemplateFile("templates/setupview.pt")
def __init__(self, context, request):
self.context = context
self.request = request
def __call__(self):
self.request.set("disable_border", 1)
return self.template()
@property
def portal(self):
"""Returns the Portal Object
"""
return api.get_portal()
@property
def setup(self):
"""Returns the Senaite Setup Object
"""
return api.get_setup()
@cache(modified_cache_key, store_on_context)
def setupitems(self):
"""Lookup available setup items
:returns: catalog brains
"""
query = {
"path": {
"query": api.get_path(self.setup),
"depth": 1,
},
}
items = api.search(query, "portal_catalog")
# filter out items
items = filter(lambda item: not item.exclude_from_nav, items)
# sort by (translated) title
def cmp_by_translated_title(brain1, brain2):
title1 = t(api.get_title(brain1))
title2 = t(api.get_title(brain2))
return cmp(title1, title2)
return sorted(items, cmp=cmp_by_translated_title)
|
senaite/senaite.lims
|
src/senaite/lims/browser/controlpanel/views/setupview.py
|
SetupView.setupitems
|
python
|
def setupitems(self):
query = {
"path": {
"query": api.get_path(self.setup),
"depth": 1,
},
}
items = api.search(query, "portal_catalog")
# filter out items
items = filter(lambda item: not item.exclude_from_nav, items)
# sort by (translated) title
def cmp_by_translated_title(brain1, brain2):
title1 = t(api.get_title(brain1))
title2 = t(api.get_title(brain2))
return cmp(title1, title2)
return sorted(items, cmp=cmp_by_translated_title)
|
Lookup available setup items
:returns: catalog brains
|
train
|
https://github.com/senaite/senaite.lims/blob/3c7fc7b462321fb354c478c19b5c20f3014fa398/src/senaite/lims/browser/controlpanel/views/setupview.py#L83-L104
| null |
class SetupView(BrowserView):
"""SENAITE LIMS Setup View
"""
template = ViewPageTemplateFile("templates/setupview.pt")
def __init__(self, context, request):
self.context = context
self.request = request
def __call__(self):
self.request.set("disable_border", 1)
return self.template()
@property
def portal(self):
"""Returns the Portal Object
"""
return api.get_portal()
@property
def setup(self):
"""Returns the Senaite Setup Object
"""
return api.get_setup()
@cache(modified_cache_key, store_on_context)
def get_icon_url(self, brain):
"""Returns the (big) icon URL for the given catalog brain
"""
icon_url = api.get_icon(brain, html_tag=False)
url, icon = icon_url.rsplit("/", 1)
relative_url = url.lstrip(self.portal.absolute_url())
name, ext = os.path.splitext(icon)
# big icons endwith _big
if not name.endswith("_big"):
icon = "{}_big{}".format(name, ext)
icon_big_url = "/".join([relative_url, icon])
# fall back to a default icon if the looked up icon does not exist
if self.context.restrictedTraverse(icon_big_url, None) is None:
icon_big_url = "++resource++senaite.lims.images/gears.png"
return icon_big_url
|
ppb/pursuedpybear
|
ppb/engine.py
|
GameEngine.on_start_scene
|
python
|
def on_start_scene(self, event: StartScene, signal: Callable[[Any], None]):
self.pause_scene()
self.start_scene(event.new_scene, event.kwargs)
|
Start a new scene. The current scene pauses.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/engine.py#L143-L148
|
[
"def pause_scene(self):\n # Empty the queue before changing scenes.\n self.flush_events()\n self.signal(events.ScenePaused())\n self.publish()\n",
"def start_scene(self, scene, kwargs):\n if isinstance(scene, type):\n scene = scene(self, **(kwargs or {}))\n self.scenes.append(scene)\n self.signal(events.SceneStarted())\n"
] |
class GameEngine(Engine, EventMixin, LoggingMixin):
def __init__(self, first_scene: Type, *,
systems=(Renderer, Updater, PygameEventPoller),
scene_kwargs=None, **kwargs):
super(GameEngine, self).__init__()
# Engine Configuration
self.first_scene = first_scene
self.scene_kwargs = scene_kwargs or {}
self.kwargs = kwargs
# Engine State
self.scenes = []
self.events = deque()
self.event_extensions: DefaultDict[Union[Type, _ellipsis], List[Callable[[Any], None]]] = defaultdict(list)
self.running = False
self.entered = False
self._last_idle_time = None
# Systems
self.systems_classes = systems
self.systems = []
self.exit_stack = ExitStack()
@property
def current_scene(self):
try:
return self.scenes[-1]
except IndexError:
return None
def __enter__(self):
self.logger.info("Entering context")
self.start_systems()
self.entered = True
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.info("Exiting context")
self.entered = False
self.exit_stack.close()
def start_systems(self):
if self.systems:
return
for system in self.systems_classes:
if isinstance(system, type):
system = system(engine=self, **self.kwargs)
self.systems.append(system)
self.exit_stack.enter_context(system)
def run(self):
if not self.entered:
with self:
self.start()
self.main_loop()
else:
self.start()
self.main_loop()
def start(self):
self.running = True
self._last_idle_time = time.monotonic()
self.activate({"scene_class": self.first_scene,
"kwargs": self.scene_kwargs})
def main_loop(self):
while self.running:
time.sleep(0)
now = time.monotonic()
self.signal(events.Idle(now - self._last_idle_time))
self._last_idle_time = now
while self.events:
self.publish()
self.manage_scene()
def activate(self, next_scene: dict):
scene = next_scene["scene_class"]
if scene is None:
return
args = next_scene.get("args", [])
kwargs = next_scene.get("kwargs", {})
self.scenes.append(scene(self, *args, **kwargs))
def signal(self, event):
self.events.append(event)
def publish(self):
event = self.events.popleft()
scene = self.current_scene
event.scene = scene
extensions = chain(self.event_extensions[type(event)], self.event_extensions[...])
for callback in extensions:
callback(event)
self.__event__(event, self.signal)
for system in self.systems:
system.__event__(event, self.signal)
# Required for if we publish with no current scene.
# Should only happen when the last scene stops via event.
if scene is not None:
scene.__event__(event, self.signal)
for game_object in scene:
game_object.__event__(event, self.signal)
def manage_scene(self):
if self.current_scene is None:
self.running = False
return None
scene_running, next_scene = self.current_scene.change()
if not scene_running:
self.scenes.pop()
if next_scene:
self.activate(next_scene)
def on_stop_scene(self, event: events.StopScene, signal: Callable[[Any], None]):
"""
Stop a running scene. If there's a scene on the stack, it resumes.
"""
self.stop_scene()
if self.current_scene is not None:
signal(events.SceneContinued())
else:
signal(events.Quit())
def on_replace_scene(self, event: events.ReplaceScene, signal):
"""
Replace the running scene with a new one.
"""
self.stop_scene()
self.start_scene(event.new_scene, event.kwargs)
def on_quit(self, quit_event: Quit, signal: Callable[[Any], None]):
self.running = False
def pause_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.ScenePaused())
self.publish()
def stop_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.SceneStopped())
self.publish()
self.scenes.pop()
def start_scene(self, scene, kwargs):
if isinstance(scene, type):
scene = scene(self, **(kwargs or {}))
self.scenes.append(scene)
self.signal(events.SceneStarted())
def register(self, event_type: Union[Type, _ellipsis], callback: Callable[[], Any]):
"""
Register a callback to be applied to an event at time of publishing.
Primarily to be used by subsystems.
The callback will receive the event. Your code should modify the event
in place. It does not need to return it.
:param event_type: The class of an event.
:param callback: A callable, must accept an event, and return no value.
:return: None
"""
if not isinstance(event_type, type) and event_type is not ...:
raise TypeError(f"{type(self)}.register requires event_type to be a type.")
if not callable(callback):
raise TypeError(f"{type(self)}.register requires callback to be callable.")
self.event_extensions[event_type].append(callback)
def flush_events(self):
"""
Flush the event queue.
Call before doing anything that will cause signals to be delivered to
the wrong scene.
"""
self.events = deque()
|
ppb/pursuedpybear
|
ppb/engine.py
|
GameEngine.on_stop_scene
|
python
|
def on_stop_scene(self, event: events.StopScene, signal: Callable[[Any], None]):
self.stop_scene()
if self.current_scene is not None:
signal(events.SceneContinued())
else:
signal(events.Quit())
|
Stop a running scene. If there's a scene on the stack, it resumes.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/engine.py#L150-L158
|
[
"def stop_scene(self):\n # Empty the queue before changing scenes.\n self.flush_events()\n self.signal(events.SceneStopped())\n self.publish()\n self.scenes.pop()\n"
] |
class GameEngine(Engine, EventMixin, LoggingMixin):
def __init__(self, first_scene: Type, *,
systems=(Renderer, Updater, PygameEventPoller),
scene_kwargs=None, **kwargs):
super(GameEngine, self).__init__()
# Engine Configuration
self.first_scene = first_scene
self.scene_kwargs = scene_kwargs or {}
self.kwargs = kwargs
# Engine State
self.scenes = []
self.events = deque()
self.event_extensions: DefaultDict[Union[Type, _ellipsis], List[Callable[[Any], None]]] = defaultdict(list)
self.running = False
self.entered = False
self._last_idle_time = None
# Systems
self.systems_classes = systems
self.systems = []
self.exit_stack = ExitStack()
@property
def current_scene(self):
try:
return self.scenes[-1]
except IndexError:
return None
def __enter__(self):
self.logger.info("Entering context")
self.start_systems()
self.entered = True
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.info("Exiting context")
self.entered = False
self.exit_stack.close()
def start_systems(self):
if self.systems:
return
for system in self.systems_classes:
if isinstance(system, type):
system = system(engine=self, **self.kwargs)
self.systems.append(system)
self.exit_stack.enter_context(system)
def run(self):
if not self.entered:
with self:
self.start()
self.main_loop()
else:
self.start()
self.main_loop()
def start(self):
self.running = True
self._last_idle_time = time.monotonic()
self.activate({"scene_class": self.first_scene,
"kwargs": self.scene_kwargs})
def main_loop(self):
while self.running:
time.sleep(0)
now = time.monotonic()
self.signal(events.Idle(now - self._last_idle_time))
self._last_idle_time = now
while self.events:
self.publish()
self.manage_scene()
def activate(self, next_scene: dict):
scene = next_scene["scene_class"]
if scene is None:
return
args = next_scene.get("args", [])
kwargs = next_scene.get("kwargs", {})
self.scenes.append(scene(self, *args, **kwargs))
def signal(self, event):
self.events.append(event)
def publish(self):
event = self.events.popleft()
scene = self.current_scene
event.scene = scene
extensions = chain(self.event_extensions[type(event)], self.event_extensions[...])
for callback in extensions:
callback(event)
self.__event__(event, self.signal)
for system in self.systems:
system.__event__(event, self.signal)
# Required for if we publish with no current scene.
# Should only happen when the last scene stops via event.
if scene is not None:
scene.__event__(event, self.signal)
for game_object in scene:
game_object.__event__(event, self.signal)
def manage_scene(self):
if self.current_scene is None:
self.running = False
return None
scene_running, next_scene = self.current_scene.change()
if not scene_running:
self.scenes.pop()
if next_scene:
self.activate(next_scene)
def on_start_scene(self, event: StartScene, signal: Callable[[Any], None]):
"""
Start a new scene. The current scene pauses.
"""
self.pause_scene()
self.start_scene(event.new_scene, event.kwargs)
def on_replace_scene(self, event: events.ReplaceScene, signal):
"""
Replace the running scene with a new one.
"""
self.stop_scene()
self.start_scene(event.new_scene, event.kwargs)
def on_quit(self, quit_event: Quit, signal: Callable[[Any], None]):
self.running = False
def pause_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.ScenePaused())
self.publish()
def stop_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.SceneStopped())
self.publish()
self.scenes.pop()
def start_scene(self, scene, kwargs):
if isinstance(scene, type):
scene = scene(self, **(kwargs or {}))
self.scenes.append(scene)
self.signal(events.SceneStarted())
def register(self, event_type: Union[Type, _ellipsis], callback: Callable[[], Any]):
"""
Register a callback to be applied to an event at time of publishing.
Primarily to be used by subsystems.
The callback will receive the event. Your code should modify the event
in place. It does not need to return it.
:param event_type: The class of an event.
:param callback: A callable, must accept an event, and return no value.
:return: None
"""
if not isinstance(event_type, type) and event_type is not ...:
raise TypeError(f"{type(self)}.register requires event_type to be a type.")
if not callable(callback):
raise TypeError(f"{type(self)}.register requires callback to be callable.")
self.event_extensions[event_type].append(callback)
def flush_events(self):
"""
Flush the event queue.
Call before doing anything that will cause signals to be delivered to
the wrong scene.
"""
self.events = deque()
|
ppb/pursuedpybear
|
ppb/engine.py
|
GameEngine.on_replace_scene
|
python
|
def on_replace_scene(self, event: events.ReplaceScene, signal):
self.stop_scene()
self.start_scene(event.new_scene, event.kwargs)
|
Replace the running scene with a new one.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/engine.py#L160-L165
|
[
"def stop_scene(self):\n # Empty the queue before changing scenes.\n self.flush_events()\n self.signal(events.SceneStopped())\n self.publish()\n self.scenes.pop()\n",
"def start_scene(self, scene, kwargs):\n if isinstance(scene, type):\n scene = scene(self, **(kwargs or {}))\n self.scenes.append(scene)\n self.signal(events.SceneStarted())\n"
] |
class GameEngine(Engine, EventMixin, LoggingMixin):
def __init__(self, first_scene: Type, *,
systems=(Renderer, Updater, PygameEventPoller),
scene_kwargs=None, **kwargs):
super(GameEngine, self).__init__()
# Engine Configuration
self.first_scene = first_scene
self.scene_kwargs = scene_kwargs or {}
self.kwargs = kwargs
# Engine State
self.scenes = []
self.events = deque()
self.event_extensions: DefaultDict[Union[Type, _ellipsis], List[Callable[[Any], None]]] = defaultdict(list)
self.running = False
self.entered = False
self._last_idle_time = None
# Systems
self.systems_classes = systems
self.systems = []
self.exit_stack = ExitStack()
@property
def current_scene(self):
try:
return self.scenes[-1]
except IndexError:
return None
def __enter__(self):
self.logger.info("Entering context")
self.start_systems()
self.entered = True
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.info("Exiting context")
self.entered = False
self.exit_stack.close()
def start_systems(self):
if self.systems:
return
for system in self.systems_classes:
if isinstance(system, type):
system = system(engine=self, **self.kwargs)
self.systems.append(system)
self.exit_stack.enter_context(system)
def run(self):
if not self.entered:
with self:
self.start()
self.main_loop()
else:
self.start()
self.main_loop()
def start(self):
self.running = True
self._last_idle_time = time.monotonic()
self.activate({"scene_class": self.first_scene,
"kwargs": self.scene_kwargs})
def main_loop(self):
while self.running:
time.sleep(0)
now = time.monotonic()
self.signal(events.Idle(now - self._last_idle_time))
self._last_idle_time = now
while self.events:
self.publish()
self.manage_scene()
def activate(self, next_scene: dict):
scene = next_scene["scene_class"]
if scene is None:
return
args = next_scene.get("args", [])
kwargs = next_scene.get("kwargs", {})
self.scenes.append(scene(self, *args, **kwargs))
def signal(self, event):
self.events.append(event)
def publish(self):
event = self.events.popleft()
scene = self.current_scene
event.scene = scene
extensions = chain(self.event_extensions[type(event)], self.event_extensions[...])
for callback in extensions:
callback(event)
self.__event__(event, self.signal)
for system in self.systems:
system.__event__(event, self.signal)
# Required for if we publish with no current scene.
# Should only happen when the last scene stops via event.
if scene is not None:
scene.__event__(event, self.signal)
for game_object in scene:
game_object.__event__(event, self.signal)
def manage_scene(self):
if self.current_scene is None:
self.running = False
return None
scene_running, next_scene = self.current_scene.change()
if not scene_running:
self.scenes.pop()
if next_scene:
self.activate(next_scene)
def on_start_scene(self, event: StartScene, signal: Callable[[Any], None]):
"""
Start a new scene. The current scene pauses.
"""
self.pause_scene()
self.start_scene(event.new_scene, event.kwargs)
def on_stop_scene(self, event: events.StopScene, signal: Callable[[Any], None]):
"""
Stop a running scene. If there's a scene on the stack, it resumes.
"""
self.stop_scene()
if self.current_scene is not None:
signal(events.SceneContinued())
else:
signal(events.Quit())
def on_quit(self, quit_event: Quit, signal: Callable[[Any], None]):
self.running = False
def pause_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.ScenePaused())
self.publish()
def stop_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.SceneStopped())
self.publish()
self.scenes.pop()
def start_scene(self, scene, kwargs):
if isinstance(scene, type):
scene = scene(self, **(kwargs or {}))
self.scenes.append(scene)
self.signal(events.SceneStarted())
def register(self, event_type: Union[Type, _ellipsis], callback: Callable[[], Any]):
"""
Register a callback to be applied to an event at time of publishing.
Primarily to be used by subsystems.
The callback will receive the event. Your code should modify the event
in place. It does not need to return it.
:param event_type: The class of an event.
:param callback: A callable, must accept an event, and return no value.
:return: None
"""
if not isinstance(event_type, type) and event_type is not ...:
raise TypeError(f"{type(self)}.register requires event_type to be a type.")
if not callable(callback):
raise TypeError(f"{type(self)}.register requires callback to be callable.")
self.event_extensions[event_type].append(callback)
def flush_events(self):
"""
Flush the event queue.
Call before doing anything that will cause signals to be delivered to
the wrong scene.
"""
self.events = deque()
|
ppb/pursuedpybear
|
ppb/engine.py
|
GameEngine.register
|
python
|
def register(self, event_type: Union[Type, _ellipsis], callback: Callable[[], Any]):
if not isinstance(event_type, type) and event_type is not ...:
raise TypeError(f"{type(self)}.register requires event_type to be a type.")
if not callable(callback):
raise TypeError(f"{type(self)}.register requires callback to be callable.")
self.event_extensions[event_type].append(callback)
|
Register a callback to be applied to an event at time of publishing.
Primarily to be used by subsystems.
The callback will receive the event. Your code should modify the event
in place. It does not need to return it.
:param event_type: The class of an event.
:param callback: A callable, must accept an event, and return no value.
:return: None
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/engine.py#L189-L206
| null |
class GameEngine(Engine, EventMixin, LoggingMixin):
def __init__(self, first_scene: Type, *,
systems=(Renderer, Updater, PygameEventPoller),
scene_kwargs=None, **kwargs):
super(GameEngine, self).__init__()
# Engine Configuration
self.first_scene = first_scene
self.scene_kwargs = scene_kwargs or {}
self.kwargs = kwargs
# Engine State
self.scenes = []
self.events = deque()
self.event_extensions: DefaultDict[Union[Type, _ellipsis], List[Callable[[Any], None]]] = defaultdict(list)
self.running = False
self.entered = False
self._last_idle_time = None
# Systems
self.systems_classes = systems
self.systems = []
self.exit_stack = ExitStack()
@property
def current_scene(self):
try:
return self.scenes[-1]
except IndexError:
return None
def __enter__(self):
self.logger.info("Entering context")
self.start_systems()
self.entered = True
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.info("Exiting context")
self.entered = False
self.exit_stack.close()
def start_systems(self):
if self.systems:
return
for system in self.systems_classes:
if isinstance(system, type):
system = system(engine=self, **self.kwargs)
self.systems.append(system)
self.exit_stack.enter_context(system)
def run(self):
if not self.entered:
with self:
self.start()
self.main_loop()
else:
self.start()
self.main_loop()
def start(self):
self.running = True
self._last_idle_time = time.monotonic()
self.activate({"scene_class": self.first_scene,
"kwargs": self.scene_kwargs})
def main_loop(self):
while self.running:
time.sleep(0)
now = time.monotonic()
self.signal(events.Idle(now - self._last_idle_time))
self._last_idle_time = now
while self.events:
self.publish()
self.manage_scene()
def activate(self, next_scene: dict):
scene = next_scene["scene_class"]
if scene is None:
return
args = next_scene.get("args", [])
kwargs = next_scene.get("kwargs", {})
self.scenes.append(scene(self, *args, **kwargs))
def signal(self, event):
self.events.append(event)
def publish(self):
event = self.events.popleft()
scene = self.current_scene
event.scene = scene
extensions = chain(self.event_extensions[type(event)], self.event_extensions[...])
for callback in extensions:
callback(event)
self.__event__(event, self.signal)
for system in self.systems:
system.__event__(event, self.signal)
# Required for if we publish with no current scene.
# Should only happen when the last scene stops via event.
if scene is not None:
scene.__event__(event, self.signal)
for game_object in scene:
game_object.__event__(event, self.signal)
def manage_scene(self):
if self.current_scene is None:
self.running = False
return None
scene_running, next_scene = self.current_scene.change()
if not scene_running:
self.scenes.pop()
if next_scene:
self.activate(next_scene)
def on_start_scene(self, event: StartScene, signal: Callable[[Any], None]):
"""
Start a new scene. The current scene pauses.
"""
self.pause_scene()
self.start_scene(event.new_scene, event.kwargs)
def on_stop_scene(self, event: events.StopScene, signal: Callable[[Any], None]):
"""
Stop a running scene. If there's a scene on the stack, it resumes.
"""
self.stop_scene()
if self.current_scene is not None:
signal(events.SceneContinued())
else:
signal(events.Quit())
def on_replace_scene(self, event: events.ReplaceScene, signal):
"""
Replace the running scene with a new one.
"""
self.stop_scene()
self.start_scene(event.new_scene, event.kwargs)
def on_quit(self, quit_event: Quit, signal: Callable[[Any], None]):
self.running = False
def pause_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.ScenePaused())
self.publish()
def stop_scene(self):
# Empty the queue before changing scenes.
self.flush_events()
self.signal(events.SceneStopped())
self.publish()
self.scenes.pop()
def start_scene(self, scene, kwargs):
if isinstance(scene, type):
scene = scene(self, **(kwargs or {}))
self.scenes.append(scene)
self.signal(events.SceneStarted())
def flush_events(self):
"""
Flush the event queue.
Call before doing anything that will cause signals to be delivered to
the wrong scene.
"""
self.events = deque()
|
ppb/pursuedpybear
|
ppb/utils.py
|
_build_index
|
python
|
def _build_index():
    """Rebuild the ``_module_file_index`` file-name -> module-name table.

    Scans every loaded module in ``sys.modules`` and records the mapping
    for those that expose both ``__file__`` and ``__name__``.
    """
    global _module_file_index
    index = {}
    for mod in sys.modules.values():
        if hasattr(mod, '__file__') and hasattr(mod, '__name__'):
            index[mod.__file__] = mod.__name__
    _module_file_index = index
|
Rebuild _module_file_index from sys.modules
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/utils.py#L11-L20
| null |
import logging
import sys
__all__ = 'LoggingMixin',
# Dictionary mapping file names -> module names
_module_file_index = {}
def _get_module(file_name):
"""
Find the module name for the given file name, or raise KeyError if it's
not a loaded module.
"""
if file_name not in _module_file_index:
_build_index()
return _module_file_index[file_name]
class LoggingMixin:
"""
A simple mixin to provide a `logger` attribute to instances, based on their
module.
"""
@property
def logger(self):
"""
The logger for this class.
"""
# This is internal/CPython only/etc
# It's also astonishingly faster than alternatives.
frame = sys._getframe(1)
file_name = frame.f_code.co_filename
module_name = _get_module(file_name)
return logging.getLogger(module_name)
|
ppb/pursuedpybear
|
ppb/utils.py
|
LoggingMixin.logger
|
python
|
def logger(self):
    """Return a logger named after the module that *accesses* this property.

    The caller's source file is discovered by inspecting the stack frame
    one level up, so the frame depth must not change — do not extract
    these lines into a helper.
    """
    # This is internal/CPython only/etc
    # It's also astonishingly faster than alternatives.
    frame = sys._getframe(1)  # frame of the code reading ``self.logger``
    file_name = frame.f_code.co_filename
    # _get_module raises KeyError when the file is not a loaded module --
    # NOTE(review): presumably only reachable from imported code; confirm.
    module_name = _get_module(file_name)
    return logging.getLogger(module_name)
|
The logger for this class.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/utils.py#L40-L50
|
[
"def _get_module(file_name):\n \"\"\"\n Find the module name for the given file name, or raise KeyError if it's\n not a loaded module.\n \"\"\"\n if file_name not in _module_file_index:\n _build_index()\n return _module_file_index[file_name]\n"
] |
class LoggingMixin:
"""
A simple mixin to provide a `logger` attribute to instances, based on their
module.
"""
@property
|
ppb/pursuedpybear
|
ppb/features/animation.py
|
Animation.pause
|
python
|
def pause(self):
    """Pause the animation.

    Pauses nest: each call must be balanced by a matching ``unpause``.
    The current time and frame are captured only on the outermost pause.
    """
    if self._pause_level == 0:
        # Snapshot where we are so unpause can resume seamlessly.
        self._paused_time = self._clock() + self._offset
        self._paused_frame = self.current_frame
    self._pause_level = self._pause_level + 1
|
Pause the animation.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/features/animation.py#L72-L79
|
[
"def _clock(self):\n return type(self).clock()\n"
] |
class Animation:
"""
An "image" that actually rotates through numbered files at the specified rate.
"""
# Override this to change the clock used for frames.
clock = time.monotonic
def __init__(self, filename, frames_per_second):
"""
:param str filename: A path containing a ``{2..4}`` indicating the frame number
:param number frames_per_second: The number of frames to show each second
"""
self._filename = filename
self.frames_per_second = frames_per_second
self._paused_frame = None
self._pause_level = 0
self._frames = []
self._offset = -self._clock()
self._compile_filename()
def __repr__(self):
return f"{type(self).__name__}({self._filename!r}, {self.frames_per_second!r})"
# Do we need pickle/copy dunders?
def copy(self):
"""
Create a new Animation with the same filename and framerate. Pause
status and starting time are reset.
"""
return type(self)(self._filename, self.frames_per_second)
def _clock(self):
return type(self).clock()
@property
def filename(self):
return self._filename
@filename.setter
def filename(self, value):
self._filename = value
self._compile_filename()
def _compile_filename(self):
match = FILE_PATTERN.search(self._filename)
start, end = match.groups()
numdigits = min(len(start), len(end))
start = int(start)
end = int(end)
template = FILE_PATTERN.sub(
'{:0%dd}' % numdigits,
self._filename,
)
self._frames = [
template.format(n)
for n in range(start, end + 1)
]
def unpause(self):
"""
Unpause the animation.
"""
self._pause_level -= 1
if not self._pause_level:
self._offset = self._paused_time - self._clock()
def _current_frame(self, time):
if not self._pause_level:
return (
int((time + self._offset) * self.frames_per_second)
% len(self._frames)
)
else:
return self._paused_frame
@property
def current_frame(self):
"""
Compute the number of the current frame (0-indexed)
"""
if not self._pause_level:
return (
int((self._clock() + self._offset) * self.frames_per_second)
% len(self._frames)
)
else:
return self._paused_frame
def __str__(self):
"""
Get the current frame path.
"""
return self._frames[self.current_frame]
# This is so that if you assign an Animation to a class, instances will get
# their own copy, so their animations run independently.
_prop_name = None
def __get__(self, obj, type=None):
if obj is None:
return self
v = vars(obj)
if self._prop_name not in v:
v[self._prop_name] = self.copy()
return v[self._prop_name]
# Don't need __set__() or __delete__(), additional accesses will be via
# __dict__ directly.
def __set_name__(self, owner, name):
self._prop_name = name
|
ppb/pursuedpybear
|
ppb/features/animation.py
|
Animation.unpause
|
python
|
def unpause(self):
    """Release one level of ``pause``.

    The animation only resumes once every nested pause has been undone;
    the clock offset is then shifted so playback continues from the
    frame it was paused on.
    """
    self._pause_level -= 1
    if self._pause_level == 0:
        self._offset = self._paused_time - self._clock()
|
Unpause the animation.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/features/animation.py#L81-L87
|
[
"def _clock(self):\n return type(self).clock()\n"
] |
class Animation:
"""
An "image" that actually rotates through numbered files at the specified rate.
"""
# Override this to change the clock used for frames.
clock = time.monotonic
def __init__(self, filename, frames_per_second):
"""
:param str filename: A path containing a ``{2..4}`` indicating the frame number
:param number frames_per_second: The number of frames to show each second
"""
self._filename = filename
self.frames_per_second = frames_per_second
self._paused_frame = None
self._pause_level = 0
self._frames = []
self._offset = -self._clock()
self._compile_filename()
def __repr__(self):
return f"{type(self).__name__}({self._filename!r}, {self.frames_per_second!r})"
# Do we need pickle/copy dunders?
def copy(self):
"""
Create a new Animation with the same filename and framerate. Pause
status and starting time are reset.
"""
return type(self)(self._filename, self.frames_per_second)
def _clock(self):
return type(self).clock()
@property
def filename(self):
return self._filename
@filename.setter
def filename(self, value):
self._filename = value
self._compile_filename()
def _compile_filename(self):
match = FILE_PATTERN.search(self._filename)
start, end = match.groups()
numdigits = min(len(start), len(end))
start = int(start)
end = int(end)
template = FILE_PATTERN.sub(
'{:0%dd}' % numdigits,
self._filename,
)
self._frames = [
template.format(n)
for n in range(start, end + 1)
]
def pause(self):
"""
Pause the animation.
"""
if not self._pause_level:
self._paused_time = self._clock() + self._offset
self._paused_frame = self.current_frame
self._pause_level += 1
def _current_frame(self, time):
if not self._pause_level:
return (
int((time + self._offset) * self.frames_per_second)
% len(self._frames)
)
else:
return self._paused_frame
@property
def current_frame(self):
"""
Compute the number of the current frame (0-indexed)
"""
if not self._pause_level:
return (
int((self._clock() + self._offset) * self.frames_per_second)
% len(self._frames)
)
else:
return self._paused_frame
def __str__(self):
"""
Get the current frame path.
"""
return self._frames[self.current_frame]
# This is so that if you assign an Animation to a class, instances will get
# their own copy, so their animations run independently.
_prop_name = None
def __get__(self, obj, type=None):
if obj is None:
return self
v = vars(obj)
if self._prop_name not in v:
v[self._prop_name] = self.copy()
return v[self._prop_name]
# Don't need __set__() or __delete__(), additional accesses will be via
# __dict__ directly.
def __set_name__(self, owner, name):
self._prop_name = name
|
ppb/pursuedpybear
|
ppb/features/animation.py
|
Animation.current_frame
|
python
|
def current_frame(self):
    """Compute the number of the current frame (0-indexed).

    While paused, returns the frame captured at pause time; otherwise
    derives the frame from elapsed clock time and the frame rate,
    wrapping around the frame list.
    """
    if self._pause_level:
        return self._paused_frame
    elapsed = self._clock() + self._offset
    return int(elapsed * self.frames_per_second) % len(self._frames)
|
Compute the number of the current frame (0-indexed)
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/features/animation.py#L99-L109
|
[
"def _clock(self):\n return type(self).clock()\n"
] |
class Animation:
"""
An "image" that actually rotates through numbered files at the specified rate.
"""
# Override this to change the clock used for frames.
clock = time.monotonic
def __init__(self, filename, frames_per_second):
"""
:param str filename: A path containing a ``{2..4}`` indicating the frame number
:param number frames_per_second: The number of frames to show each second
"""
self._filename = filename
self.frames_per_second = frames_per_second
self._paused_frame = None
self._pause_level = 0
self._frames = []
self._offset = -self._clock()
self._compile_filename()
def __repr__(self):
return f"{type(self).__name__}({self._filename!r}, {self.frames_per_second!r})"
# Do we need pickle/copy dunders?
def copy(self):
"""
Create a new Animation with the same filename and framerate. Pause
status and starting time are reset.
"""
return type(self)(self._filename, self.frames_per_second)
def _clock(self):
return type(self).clock()
@property
def filename(self):
return self._filename
@filename.setter
def filename(self, value):
self._filename = value
self._compile_filename()
def _compile_filename(self):
match = FILE_PATTERN.search(self._filename)
start, end = match.groups()
numdigits = min(len(start), len(end))
start = int(start)
end = int(end)
template = FILE_PATTERN.sub(
'{:0%dd}' % numdigits,
self._filename,
)
self._frames = [
template.format(n)
for n in range(start, end + 1)
]
def pause(self):
"""
Pause the animation.
"""
if not self._pause_level:
self._paused_time = self._clock() + self._offset
self._paused_frame = self.current_frame
self._pause_level += 1
def unpause(self):
"""
Unpause the animation.
"""
self._pause_level -= 1
if not self._pause_level:
self._offset = self._paused_time - self._clock()
def _current_frame(self, time):
if not self._pause_level:
return (
int((time + self._offset) * self.frames_per_second)
% len(self._frames)
)
else:
return self._paused_frame
@property
def __str__(self):
"""
Get the current frame path.
"""
return self._frames[self.current_frame]
# This is so that if you assign an Animation to a class, instances will get
# their own copy, so their animations run independently.
_prop_name = None
def __get__(self, obj, type=None):
if obj is None:
return self
v = vars(obj)
if self._prop_name not in v:
v[self._prop_name] = self.copy()
return v[self._prop_name]
# Don't need __set__() or __delete__(), additional accesses will be via
# __dict__ directly.
def __set_name__(self, owner, name):
self._prop_name = name
|
ppb/pursuedpybear
|
ppb/__init__.py
|
run
|
python
|
def run(setup: Callable[[BaseScene], None]=None, *, log_level=logging.WARNING,
        starting_scene=BaseScene):
    """Run a small game at an 800x600 resolution.

    :param setup: callable invoked with the starting scene, or ``None``
    :param log_level: level handed to :func:`logging.basicConfig`
    :param starting_scene: scene class the engine starts with
    """
    logging.basicConfig(level=log_level)
    engine_kwargs = dict(
        resolution=(800, 600),
        scene_kwargs=dict(set_up=setup),
    )
    with GameEngine(starting_scene, **engine_kwargs) as eng:
        eng.run()
|
Run a small game.
The resolution will 800 pixels wide by 600 pixels tall.
setup is a callable that accepts a scene and returns None.
log_level let's you set the expected log level. Consider logging.DEBUG if
something is behaving oddly.
starting_scene let's you change the scene used by the engine.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/__init__.py#L10-L34
| null |
import logging
from typing import Callable
from ppb.vector import Vector
from ppb.engine import GameEngine
from ppb.scenes import BaseScene
from ppb.sprites import BaseSprite
|
ppb/pursuedpybear
|
ppb/scenes.py
|
GameObjectCollection.add
|
python
|
def add(self, game_object: Hashable, tags: Iterable[Hashable]=()) -> None:
    """Add *game_object* to the container, indexed by its full MRO and by
    each tag in *tags*.

    :raises TypeError: if *tags* is a str/bytes (almost certainly a
        mistake, since a string iterates character-by-character).
    """
    if isinstance(tags, (str, bytes)):
        raise TypeError("You passed a string instead of an iterable, this probably isn't what you intended.\n\nTry making it a tuple.")
    self.all.add(game_object)
    for klass in type(game_object).mro():
        self.kinds[klass].add(game_object)
    for label in tags:
        self.tags[label].add(game_object)
|
Add a game_object to the container.
game_object: Any Hashable object. The item to be added.
tags: An iterable of Hashable objects. Values that can be used to
retrieve a group containing the game_object.
Examples:
container.add(MyObject())
container.add(MyObject(), tags=("red", "blue")
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/scenes.py#L35-L55
| null |
class GameObjectCollection(Collection):
"""A container for game objects."""
def __init__(self):
self.all = set()
self.kinds = defaultdict(set)
self.tags = defaultdict(set)
def __contains__(self, item: Hashable) -> bool:
return item in self.all
def __iter__(self) -> Iterator[Hashable]:
return (x for x in list(self.all))
def __len__(self) -> int:
return len(self.all)
def get(self, *, kind: Type=None, tag: Hashable=None, **_) -> Iterator:
"""
Get an iterator of objects by kind or tag.
kind: Any type. Pass to get a subset of contained items with the given
type.
tag: Any Hashable object. Pass to get a subset of contained items with
the given tag.
Pass both kind and tag to get objects that are both that type and that
tag.
Examples:
container.get(type=MyObject)
container.get(tag="red")
container.get(type=MyObject, tag="red")
"""
if kind is None and tag is None:
raise TypeError("get() takes at least one keyword-only argument. 'kind' or 'tag'.")
kinds = self.all
tags = self.all
if kind is not None:
kinds = self.kinds[kind]
if tag is not None:
tags = self.tags[tag]
return (x for x in kinds.intersection(tags))
def remove(self, game_object: Hashable) -> None:
"""
Remove the given object from the container.
game_object: A hashable contained by container.
Example:
container.remove(myObject)
"""
self.all.remove(game_object)
for kind in type(game_object).mro():
self.kinds[kind].remove(game_object)
for s in self.tags.values():
s.discard(game_object)
|
ppb/pursuedpybear
|
ppb/scenes.py
|
GameObjectCollection.get
|
python
|
def get(self, *, kind: Type=None, tag: Hashable=None, **_) -> Iterator:
    """Iterate contained objects matching *kind*, *tag*, or both.

    :raises TypeError: if neither keyword is supplied.
    """
    if kind is None and tag is None:
        raise TypeError("get() takes at least one keyword-only argument. 'kind' or 'tag'.")
    by_kind = self.kinds[kind] if kind is not None else self.all
    by_tag = self.tags[tag] if tag is not None else self.all
    return (obj for obj in by_kind.intersection(by_tag))
|
Get an iterator of objects by kind or tag.
kind: Any type. Pass to get a subset of contained items with the given
type.
tag: Any Hashable object. Pass to get a subset of contained items with
the given tag.
Pass both kind and tag to get objects that are both that type and that
tag.
Examples:
container.get(type=MyObject)
container.get(tag="red")
container.get(type=MyObject, tag="red")
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/scenes.py#L57-L84
| null |
class GameObjectCollection(Collection):
"""A container for game objects."""
def __init__(self):
self.all = set()
self.kinds = defaultdict(set)
self.tags = defaultdict(set)
def __contains__(self, item: Hashable) -> bool:
return item in self.all
def __iter__(self) -> Iterator[Hashable]:
return (x for x in list(self.all))
def __len__(self) -> int:
return len(self.all)
def add(self, game_object: Hashable, tags: Iterable[Hashable]=()) -> None:
"""
Add a game_object to the container.
game_object: Any Hashable object. The item to be added.
tags: An iterable of Hashable objects. Values that can be used to
retrieve a group containing the game_object.
Examples:
container.add(MyObject())
container.add(MyObject(), tags=("red", "blue")
"""
if isinstance(tags, (str, bytes)):
raise TypeError("You passed a string instead of an iterable, this probably isn't what you intended.\n\nTry making it a tuple.")
self.all.add(game_object)
for kind in type(game_object).mro():
self.kinds[kind].add(game_object)
for tag in tags:
self.tags[tag].add(game_object)
def remove(self, game_object: Hashable) -> None:
"""
Remove the given object from the container.
game_object: A hashable contained by container.
Example:
container.remove(myObject)
"""
self.all.remove(game_object)
for kind in type(game_object).mro():
self.kinds[kind].remove(game_object)
for s in self.tags.values():
s.discard(game_object)
|
ppb/pursuedpybear
|
ppb/scenes.py
|
GameObjectCollection.remove
|
python
|
def remove(self, game_object: Hashable) -> None:
    """Remove *game_object* from the container and all of its indexes.

    :raises KeyError: if the object is not present.
    """
    self.all.remove(game_object)
    for klass in type(game_object).mro():
        self.kinds[klass].remove(game_object)
    # Tags are unknown at removal time, so discard from every tag bucket.
    for tagged in self.tags.values():
        tagged.discard(game_object)
|
Remove the given object from the container.
game_object: A hashable contained by container.
Example:
container.remove(myObject)
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/scenes.py#L86-L99
| null |
class GameObjectCollection(Collection):
"""A container for game objects."""
def __init__(self):
self.all = set()
self.kinds = defaultdict(set)
self.tags = defaultdict(set)
def __contains__(self, item: Hashable) -> bool:
return item in self.all
def __iter__(self) -> Iterator[Hashable]:
return (x for x in list(self.all))
def __len__(self) -> int:
return len(self.all)
def add(self, game_object: Hashable, tags: Iterable[Hashable]=()) -> None:
"""
Add a game_object to the container.
game_object: Any Hashable object. The item to be added.
tags: An iterable of Hashable objects. Values that can be used to
retrieve a group containing the game_object.
Examples:
container.add(MyObject())
container.add(MyObject(), tags=("red", "blue")
"""
if isinstance(tags, (str, bytes)):
raise TypeError("You passed a string instead of an iterable, this probably isn't what you intended.\n\nTry making it a tuple.")
self.all.add(game_object)
for kind in type(game_object).mro():
self.kinds[kind].add(game_object)
for tag in tags:
self.tags[tag].add(game_object)
def get(self, *, kind: Type=None, tag: Hashable=None, **_) -> Iterator:
"""
Get an iterator of objects by kind or tag.
kind: Any type. Pass to get a subset of contained items with the given
type.
tag: Any Hashable object. Pass to get a subset of contained items with
the given tag.
Pass both kind and tag to get objects that are both that type and that
tag.
Examples:
container.get(type=MyObject)
container.get(tag="red")
container.get(type=MyObject, tag="red")
"""
if kind is None and tag is None:
raise TypeError("get() takes at least one keyword-only argument. 'kind' or 'tag'.")
kinds = self.all
tags = self.all
if kind is not None:
kinds = self.kinds[kind]
if tag is not None:
tags = self.tags[tag]
return (x for x in kinds.intersection(tags))
|
ppb/pursuedpybear
|
ppb/scenes.py
|
BaseScene.change
|
python
|
def change(self) -> Tuple[bool, dict]:
    """Report whether the scene keeps running and which scene follows it.

    Default case, override in subclass as necessary.

    :return: ``(running, {"scene_class": queued_scene_or_None})``; the
        queued ``self.next`` scene is consumed (reset to None) here.
    """
    next_scene = self.next
    self.next = None
    # BUG FIX: the deprecation warning previously tested ``self.next``
    # *after* it had been reset to None, so it could never fire for a
    # queued scene change. Test the captured value instead. (Also renamed
    # the local away from the ``next`` builtin.)
    if next_scene or not self.running:
        message = "The Scene.change interface is deprecated. Use the events commands instead."
        warn(message, DeprecationWarning)
    return self.running, {"scene_class": next_scene}
|
Default case, override in subclass as necessary.
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/scenes.py#L144-L154
| null |
class BaseScene(Scene, EventMixin):
# Background color, in RGB, each channel is 0-255
background_color: Sequence[int] = (0, 0, 100)
container_class: Type = GameObjectCollection
def __init__(self, engine, *,
set_up: Callable=None, pixel_ratio: Number=64,
**kwargs):
super().__init__(engine)
for k, v in kwargs.items():
setattr(self, k, v)
self.game_objects = self.container_class()
self.main_camera = Camera(pixel_ratio=pixel_ratio)
if set_up is not None:
set_up(self)
def __contains__(self, item: Hashable) -> bool:
return item in self.game_objects
def __iter__(self) -> Iterator:
return (x for x in self.game_objects)
@property
def kinds(self):
return self.game_objects.kinds
@property
def tags(self):
return self.game_objects.tags
@property
def main_camera(self) -> Camera:
return next(self.game_objects.get(tag="main_camera"))
@main_camera.setter
def main_camera(self, value: Camera):
for camera in self.game_objects.get(tag="main_camera"):
self.game_objects.remove(camera)
self.game_objects.add(value, tags=["main_camera"])
def add(self, game_object: Hashable, tags: Iterable=())-> None:
"""
Add a game_object to the scene.
game_object: Any GameObject object. The item to be added.
tags: An iterable of Hashable objects. Values that can be used to
retrieve a group containing the game_object.
Examples:
scene.add(MyGameObject())
scene.add(MyGameObject(), tags=("red", "blue")
"""
self.game_objects.add(game_object, tags)
def get(self, *, kind: Type=None, tag: Hashable=None, **kwargs) -> Iterator:
"""
Get an iterator of GameObjects by kind or tag.
kind: Any type. Pass to get a subset of contained GameObjects with the
given type.
tag: Any Hashable object. Pass to get a subset of contained GameObjects
with the given tag.
Pass both kind and tag to get objects that are both that type and that
tag.
Examples:
scene.get(type=MyGameObject)
scene.get(tag="red")
scene.get(type=MyGameObject, tag="red")
"""
return self.game_objects.get(kind=kind, tag=tag, **kwargs)
def remove(self, game_object: Hashable) -> None:
"""
Remove the given object from the scene.
game_object: A game object.
Example:
scene.remove(my_game_object)
"""
self.game_objects.remove(game_object)
|
ppb/pursuedpybear
|
ppb/scenes.py
|
BaseScene.add
|
python
|
def add(self, game_object: Hashable, tags: Iterable=()) -> None:
    """Add *game_object* to the scene, optionally indexed under *tags*.

    Delegates to the scene's game-object container.
    """
    container = self.game_objects
    container.add(game_object, tags)
|
Add a game_object to the scene.
game_object: Any GameObject object. The item to be added.
tags: An iterable of Hashable objects. Values that can be used to
retrieve a group containing the game_object.
Examples:
scene.add(MyGameObject())
scene.add(MyGameObject(), tags=("red", "blue")
|
train
|
https://github.com/ppb/pursuedpybear/blob/db3bfaaf86d14b4d1bb9e0b24cc8dc63f29c2191/ppb/scenes.py#L156-L169
| null |
class BaseScene(Scene, EventMixin):
# Background color, in RGB, each channel is 0-255
background_color: Sequence[int] = (0, 0, 100)
container_class: Type = GameObjectCollection
def __init__(self, engine, *,
set_up: Callable=None, pixel_ratio: Number=64,
**kwargs):
super().__init__(engine)
for k, v in kwargs.items():
setattr(self, k, v)
self.game_objects = self.container_class()
self.main_camera = Camera(pixel_ratio=pixel_ratio)
if set_up is not None:
set_up(self)
def __contains__(self, item: Hashable) -> bool:
return item in self.game_objects
def __iter__(self) -> Iterator:
return (x for x in self.game_objects)
@property
def kinds(self):
return self.game_objects.kinds
@property
def tags(self):
return self.game_objects.tags
@property
def main_camera(self) -> Camera:
return next(self.game_objects.get(tag="main_camera"))
@main_camera.setter
def main_camera(self, value: Camera):
for camera in self.game_objects.get(tag="main_camera"):
self.game_objects.remove(camera)
self.game_objects.add(value, tags=["main_camera"])
def change(self) -> Tuple[bool, dict]:
"""
Default case, override in subclass as necessary.
"""
next = self.next
self.next = None
if self.next or not self.running:
message = "The Scene.change interface is deprecated. Use the events commands instead."
warn(message, DeprecationWarning)
return self.running, {"scene_class": next}
def get(self, *, kind: Type=None, tag: Hashable=None, **kwargs) -> Iterator:
"""
Get an iterator of GameObjects by kind or tag.
kind: Any type. Pass to get a subset of contained GameObjects with the
given type.
tag: Any Hashable object. Pass to get a subset of contained GameObjects
with the given tag.
Pass both kind and tag to get objects that are both that type and that
tag.
Examples:
scene.get(type=MyGameObject)
scene.get(tag="red")
scene.get(type=MyGameObject, tag="red")
"""
return self.game_objects.get(kind=kind, tag=tag, **kwargs)
def remove(self, game_object: Hashable) -> None:
"""
Remove the given object from the scene.
game_object: A game object.
Example:
scene.remove(my_game_object)
"""
self.game_objects.remove(game_object)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.