repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
zulily/pudl
|
pudl/ad_user.py
|
ADUser.is_member
|
python
|
def is_member(self, group_distinguishedname):
    """For the current ADUser instance, determine whether the user is a
    member of the group identified by *group_distinguishedname*.

    The comparison is case-insensitive. The result may not be accurate if
    explicit_membership_only was set to True when the object factory
    method (user() or users()) was called.

    :param str group_distinguishedname: The group DistinguishedName
    :return: Whether the user is a member of the group
    :rtype: bool
    """
    # memberof is attached dynamically by the object factory, so pylint
    # cannot see it on the class.
    #pylint: disable=no-member
    member_dns = [dn.lower() for dn in self.memberof]
    #pylint: enable=no-member
    # Return the membership test directly rather than an if/else that
    # returns literal True/False.
    return group_distinguishedname.lower() in member_dns
|
For the current ADUser instance, determine if
the user is a member of a specific group (the group DN is used).
The result may not be accurate if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str group_distinguishedname: The group DistinguishedName
:return: A boolean indicating whether or not the user is a member of the group
:rtype: bool
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_user.py#L113-L130
| null |
class ADUser(ADObject):
"""A class to represent AD user objects. Includes a number of
helper methods, particularly object-factory related.
ADUser objects have minimal depth, with attributes set to
strings or lists. Available attributes are dependent
on the results returned by the LDAP query.
"""
# Some refactoring may be considered in the future that would
# involve passing the sAMAccountName to a contstructor override,
# and possibly moving users() to become static. Otherwise,
# instead of user() creating and returning a single new object,
# perhaps just populate the current ADUser instance, which
# could make a little more sense
def user(self, base_dn, samaccountname, attributes=(), explicit_membership_only=False):
"""Produces a single, populated ADUser object through the object factory.
Does not populate attributes for the caller instance.
:param str base_dn: The base DN to search within
:param str samaccountname: The user's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:param bool explicit_membership_only: If set True, memberof will only
list groups for which the user is a directly referenced member
:return: A populated ADUser object
:rtype: ADUser
"""
users = self.users(base_dn, samaccountnames=[samaccountname],
attributes=attributes, explicit_membership_only=explicit_membership_only)
try:
# Usually we will find a match, but perhaps not always
return users[0]
except IndexError:
logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
def users(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):
"""Gathers a list of ADUser objects
:param str base_dn: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:param list samaccountnames: A list of usernames for which objects will be
created, defaults to all users if unspecified
:param bool explicit_membership_only: If set True, memberof will only
list groups for which users are directly referenced members
:return: A list of populated ADUser objects
:rtype: list
"""
ad_users = []
search_filter = '(&(objectClass=user)(!(objectClass=group))(!(objectClass=computer)){0})'
# If no samaccountnames specified, filter will pull all user objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)')
else:
# Extensible filter: http://bit.ly/1Qh4eyV
if len(samaccountnames) == 1:
account_names = '(sAMAccountName={0})'.format(samaccountnames[0])
else:
account_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.format(username) \
for username in samaccountnames]))
search_filter = search_filter.format(account_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adu = self._object_factory(search_result)
# Each results index 0 of the tuple is the DN
if not explicit_membership_only and 'memberof' in dir(adu):
memberof = [g[0] for g in self.adq.search(base_dn,
'(member:1.2.840.113556.1.4.1941:={0})'.\
format(search_result[0]),
attributes=['memberof'])]
adu.memberof = memberof
ad_users.append(adu)
return ad_users
def group_samaccountnames(self, base_dn):
"""For the current ADUser instance, determine which
groups the user is a member of and convert the
group DistinguishedNames to sAMAccountNames.
The resulting list of groups may not be complete
if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str base_dn: The base DN to search within
:return: A list of groups (sAMAccountNames) for which the
current ADUser instance is a member, sAMAccountNames
:rtype: list
"""
#pylint: disable=no-member
mappings = self.samaccountnames(base_dn, self.memberof)
#pylint: enable=no-member
groups = [samaccountname for samaccountname in mappings.values()]
if not groups:
logging.info("%s - unable to retrieve any groups for the current ADUser instance",
self.samaccountname)
return groups
|
zulily/pudl
|
pudl/ad_user.py
|
ADUser.group_samaccountnames
|
python
|
def group_samaccountnames(self, base_dn):
    """For the current ADUser instance, resolve the group
    DistinguishedNames in memberof to group sAMAccountNames.

    The resulting list may be incomplete if explicit_membership_only was
    set to True when the object factory method (user() or users()) was
    called.

    :param str base_dn: The base DN to search within
    :return: The sAMAccountNames of the groups the user belongs to
    :rtype: list
    """
    # memberof is attached dynamically by the object factory.
    #pylint: disable=no-member
    dn_to_sam = self.samaccountnames(base_dn, self.memberof)
    #pylint: enable=no-member
    group_names = list(dn_to_sam.values())
    if not group_names:
        logging.info("%s - unable to retrieve any groups for the current ADUser instance",
                     self.samaccountname)
    return group_names
|
For the current ADUser instance, determine which
groups the user is a member of and convert the
group DistinguishedNames to sAMAccountNames.
The resulting list of groups may not be complete
if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str base_dn: The base DN to search within
:return: A list of groups (sAMAccountNames) for which the
current ADUser instance is a member, sAMAccountNames
:rtype: list
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_user.py#L133-L155
|
[
"def samaccountnames(self, base_dn, distinguished_names):\n \"\"\"Retrieve the sAMAccountNames for the specified DNs\n\n :param str base_dn: The base DN to search within\n :param list distinguished_name: A list of distinguished names for which to\n retrieve sAMAccountNames\n\n :return: Key/value pairs mapping DistinguishedName to sAMAccountName\n :rtype: dict\n \"\"\"\n attributes = ['sAMAccountName']\n search_filter = '(|{0})'.format(''.join(['(DistinguishedName={0})'.format(dn)\n for dn in distinguished_names]))\n logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)\n\n results = self.adq.search(base_dn, search_filter, attributes)\n\n mappings = {result[0]: result[1]['sAMAccountName'][0] for result in results}\n\n return mappings\n"
] |
class ADUser(ADObject):
"""A class to represent AD user objects. Includes a number of
helper methods, particularly object-factory related.
ADUser objects have minimal depth, with attributes set to
strings or lists. Available attributes are dependent
on the results returned by the LDAP query.
"""
# Some refactoring may be considered in the future that would
# involve passing the sAMAccountName to a contstructor override,
# and possibly moving users() to become static. Otherwise,
# instead of user() creating and returning a single new object,
# perhaps just populate the current ADUser instance, which
# could make a little more sense
def user(self, base_dn, samaccountname, attributes=(), explicit_membership_only=False):
"""Produces a single, populated ADUser object through the object factory.
Does not populate attributes for the caller instance.
:param str base_dn: The base DN to search within
:param str samaccountname: The user's sAMAccountName
:param list attributes: Object attributes to populate, defaults to all
:param bool explicit_membership_only: If set True, memberof will only
list groups for which the user is a directly referenced member
:return: A populated ADUser object
:rtype: ADUser
"""
users = self.users(base_dn, samaccountnames=[samaccountname],
attributes=attributes, explicit_membership_only=explicit_membership_only)
try:
# Usually we will find a match, but perhaps not always
return users[0]
except IndexError:
logging.info("%s - unable to retrieve object from AD by sAMAccountName", samaccountname)
def users(self, base_dn, samaccountnames=(), attributes=(), explicit_membership_only=False):
"""Gathers a list of ADUser objects
:param str base_dn: The base DN to search within
:param list attributes: Object attributes to populate, defaults to all
:param list samaccountnames: A list of usernames for which objects will be
created, defaults to all users if unspecified
:param bool explicit_membership_only: If set True, memberof will only
list groups for which users are directly referenced members
:return: A list of populated ADUser objects
:rtype: list
"""
ad_users = []
search_filter = '(&(objectClass=user)(!(objectClass=group))(!(objectClass=computer)){0})'
# If no samaccountnames specified, filter will pull all user objects under
# base_dn
if not samaccountnames:
search_filter = search_filter.format('(sAMAccountName=*)')
else:
# Extensible filter: http://bit.ly/1Qh4eyV
if len(samaccountnames) == 1:
account_names = '(sAMAccountName={0})'.format(samaccountnames[0])
else:
account_names = '(|{0})'.format(''.join(['(sAMAccountName={0})'.format(username) \
for username in samaccountnames]))
search_filter = search_filter.format(account_names)
logging.debug('%s Search filter: %s', self.__class__.__name__, search_filter)
results = self.adq.search(base_dn, search_filter, attributes)
for search_result in results:
adu = self._object_factory(search_result)
# Each results index 0 of the tuple is the DN
if not explicit_membership_only and 'memberof' in dir(adu):
memberof = [g[0] for g in self.adq.search(base_dn,
'(member:1.2.840.113556.1.4.1941:={0})'.\
format(search_result[0]),
attributes=['memberof'])]
adu.memberof = memberof
ad_users.append(adu)
return ad_users
def is_member(self, group_distinguishedname):
"""For the current ADUser instance, determine if
the user is a member of a specific group (the group DN is used).
The result may not be accurate if explicit_membership_only was set to
True when the object factory method (user() or users()) was
called.
:param str group_distinguishedname: The group DistinguishedName
:return: A boolean indicating whether or not the user is a member of the group
:rtype: bool
"""
#pylint: disable=no-member
if group_distinguishedname.lower() in [dn.lower() for dn in self.memberof]:
#pylint: enable=no-member
return True
else:
return False
|
zulily/pudl
|
pudl/helper.py
|
object_filter
|
python
|
def object_filter(objects, grep):
    """Keep only the ADObjects whose string attribute values match every
    regular expression in grep (a logical AND across the expressions).

    :param objects ADObject: A list of ADObjects
    :param grep list: Regular expressions that must all match
    :return: The filtered list of ADObjects
    :rtype: list
    """
    if not grep:
        # Nothing to filter on; hand back the original list untouched.
        return objects
    matches = []
    for candidate in objects:
        # Flatten the object's string attribute values into one
        # searchable blob.
        haystack = ' '.join(v for v in candidate.to_dict().values()
                            if isinstance(v, str))
        if all(re.search(pattern, haystack, re.M | re.S | re.I)
               for pattern in grep):
            matches.append(candidate)
    return matches
|
Filter out any objects that do not have attributes with values matching
*all* regular expressions present in grep (AND, essentially)
:param objects ADObject: A list of ADObjects
:param grep list: A list of regular expressions that must match for filtering
:return: A list of filtered ADObjects
:rtype: list
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/helper.py#L20-L45
| null |
# Copyright (C) 2015 zulily, llc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""helper - a module containing a collection useful object manipulations"""
import json
import re
import yaml
def serialize(ad_objects, output_format='json', indent=2, attributes_only=False):
"""Serialize the object to the specified format
:param ad_objects list: A list of ADObjects to serialize
:param output_format str: The output format, json or yaml. Defaults to json
:param indent int: The number of spaces to indent, defaults to 2
:param attributes only: Only serialize the attributes found in the first record of the list
of ADObjects
:return: A serialized, formatted representation of the list of ADObjects
:rtype: str
"""
# If the request is to only show attributes for objects returned
# in the query, overwrite ad_objects with only those attributes present in
# the first object in the list
if attributes_only:
ad_objects = [key for key in sorted(ad_objects[0].keys())]
if output_format == 'json':
return json.dumps(ad_objects, indent=indent, ensure_ascii=False, sort_keys=True)
elif output_format == 'yaml':
return yaml.dump(sorted(ad_objects), indent=indent)
|
zulily/pudl
|
pudl/helper.py
|
serialize
|
python
|
def serialize(ad_objects, output_format='json', indent=2, attributes_only=False):
    """Serialize a list of ADObjects to the requested format.

    :param ad_objects list: A list of ADObjects to serialize
    :param output_format str: 'json' or 'yaml'; defaults to 'json'
    :param indent int: The number of spaces to indent, defaults to 2
    :param attributes_only bool: If True, serialize only the sorted
        attribute names found in the first record of the list
    :return: A serialized, formatted representation of the ADObjects
    :rtype: str
    """
    if attributes_only:
        # Collapse the payload to just the first record's attribute names.
        ad_objects = sorted(ad_objects[0].keys())
    if output_format == 'yaml':
        return yaml.dump(sorted(ad_objects), indent=indent)
    if output_format == 'json':
        return json.dumps(ad_objects, indent=indent, ensure_ascii=False, sort_keys=True)
|
Serialize the object to the specified format
:param ad_objects list: A list of ADObjects to serialize
:param output_format str: The output format, json or yaml. Defaults to json
:param indent int: The number of spaces to indent, defaults to 2
:param attributes only: Only serialize the attributes found in the first record of the list
of ADObjects
:return: A serialized, formatted representation of the list of ADObjects
:rtype: str
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/helper.py#L48-L70
| null |
# Copyright (C) 2015 zulily, llc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""helper - a module containing a collection useful object manipulations"""
import json
import re
import yaml
def object_filter(objects, grep):
"""Filter out any objects that do not have attributes with values matching
*all* regular expressions present in grep (AND, essentially)
:param objects ADObject: A list of ADObjects
:param grep list: A list of regular expressions that must match for filtering
:return: A list of filtered ADObjects
:rtype: list
"""
filtered = []
if grep:
for ad_object in objects:
o_string = ' '.join([value for value in ad_object.to_dict().values()
if isinstance(value, str)])
skip = False
for regex in grep:
if not re.search(regex, o_string, re.M|re.S|re.I):
skip = True
break
if not skip:
filtered.append(ad_object)
return filtered
else:
return objects
|
zulily/pudl
|
pudl/ad_query.py
|
ADQuery.search
|
python
|
def search(self, base_dn, search_filter, attributes=()):
        """Perform a paged AD subtree search.

        :param str base_dn: The base DN to search within
        :param str search_filter: The search filter to apply, such as:
            *objectClass=person*
        :param list attributes: Object attributes to populate, defaults to all
        :return: The accumulated result tuples from all pages (index 0 of
            each tuple is the DN)
        :rtype: list
        """
        results = []
        page = 0
        # Loop until the server hands back an empty paging cookie,
        # meaning there are no further result pages.
        while page == 0 or self.sprc.cookie:
            page += 1
            #pylint: disable=no-member
            message_id = self.ldap.search_ext(base_dn, ldap.SCOPE_SUBTREE,
                                              search_filter, attributes,
                                              serverctrls=[self.sprc])
            #pylint: enable=no-member
            # The [1::2] slice picks elements 1 and 3 of result3()'s
            # return: the result data and the server controls.
            data, server_controls = self.ldap.result3(message_id)[1::2]
            # Carry the server's paging cookie into the next iteration.
            self.sprc.cookie = server_controls[0].cookie
            logging.debug('%s - Page %s results: %s', \
                self.__class__.__name__, page, ', '.join(k[0] for k in data))
            results += [u for u in data]
        return results
|
Perform an AD search
:param str base_dn: The base DN to search within
:param str search_filter: The search filter to apply, such as:
*objectClass=person*
:param list attributes: Object attributes to populate, defaults to all
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_query.py#L80-L103
| null |
class ADQuery(object): #pylint: disable=too-few-public-methods
"""Query Active directory with python-ldap. May be used directly, but is most
commonly used indirectly via ADObject-based classes. All connections
require TLS.
"""
def __init__(self, user, password,
ldap_url=LDAP_URL,
tls_no_verify=TLS_NO_VERIFY,
page_size=PAGE_SIZE):
"""The ADQuery constructor
:param str user: The LDAP user to connect as
:param str password: The LDAP user's password
:param str ldap_url: The url, defaults to *{0}*
:param bool tls_no_verify: If True, connect to servers with certificates not signed
by an authority we trust, defaults to False
:param int page_size: The max result set size, per page, defaults to *{1}*
""".format(LDAP_URL, PAGE_SIZE)
if tls_no_verify:
ldap_options = LDAP_OPTIONS_TLS_NO_VERIFY
else:
ldap_options = LDAP_OPTIONS
# Setup logging, assumes a root logger already exists with handlers
self.logger = logging.getLogger(__name__)
# Set LDAP options for the connection
for setting in ldap_options:
ldap.set_option(setting[0], setting[1])
self.ldap = ldap.initialize(ldap_url)
self.sprc = ldap.controls.SimplePagedResultsControl(True, page_size, '')
self.user = user
self.password = password
self.page_size = page_size
# Open the connection
self._open()
def _open(self):
"""Bind, use tls"""
try:
self.ldap.start_tls_s()
#pylint: disable=no-member
except ldap.CONNECT_ERROR:
#pylint: enable=no-member
logging.error('Unable to establish a connection to the LDAP server, ' + \
'please check the connection string ' + \
'and ensure the remote certificate is signed by a trusted authority.')
raise
self.ldap.simple_bind_s(self.user, self.password)
|
zulily/pudl
|
pudl/ad_query.py
|
ADQuery._open
|
python
|
def _open(self):
        """Secure the connection with STARTTLS, then perform a simple bind.

        Logs and re-raises ldap.CONNECT_ERROR if the TLS negotiation
        fails; bind errors from simple_bind_s propagate to the caller.
        """
        try:
            self.ldap.start_tls_s()
        #pylint: disable=no-member
        except ldap.CONNECT_ERROR:
        #pylint: enable=no-member
            logging.error('Unable to establish a connection to the LDAP server, ' + \
                'please check the connection string ' + \
                'and ensure the remote certificate is signed by a trusted authority.')
            # Re-raise so callers do not proceed with a broken or
            # unencrypted connection.
            raise
        # Plain (simple) bind with the credentials stored on the instance.
        self.ldap.simple_bind_s(self.user, self.password)
|
Bind, use tls
|
train
|
https://github.com/zulily/pudl/blob/761eec76841964780e759e6bf6d5f06a54844a80/pudl/ad_query.py#L106-L118
| null |
class ADQuery(object): #pylint: disable=too-few-public-methods
"""Query Active directory with python-ldap. May be used directly, but is most
commonly used indirectly via ADObject-based classes. All connections
require TLS.
"""
def __init__(self, user, password,
ldap_url=LDAP_URL,
tls_no_verify=TLS_NO_VERIFY,
page_size=PAGE_SIZE):
"""The ADQuery constructor
:param str user: The LDAP user to connect as
:param str password: The LDAP user's password
:param str ldap_url: The url, defaults to *{0}*
:param bool tls_no_verify: If True, connect to servers with certificates not signed
by an authority we trust, defaults to False
:param int page_size: The max result set size, per page, defaults to *{1}*
""".format(LDAP_URL, PAGE_SIZE)
if tls_no_verify:
ldap_options = LDAP_OPTIONS_TLS_NO_VERIFY
else:
ldap_options = LDAP_OPTIONS
# Setup logging, assumes a root logger already exists with handlers
self.logger = logging.getLogger(__name__)
# Set LDAP options for the connection
for setting in ldap_options:
ldap.set_option(setting[0], setting[1])
self.ldap = ldap.initialize(ldap_url)
self.sprc = ldap.controls.SimplePagedResultsControl(True, page_size, '')
self.user = user
self.password = password
self.page_size = page_size
# Open the connection
self._open()
def search(self, base_dn, search_filter, attributes=()):
"""Perform an AD search
:param str base_dn: The base DN to search within
:param str search_filter: The search filter to apply, such as:
*objectClass=person*
:param list attributes: Object attributes to populate, defaults to all
"""
results = []
page = 0
while page == 0 or self.sprc.cookie:
page += 1
#pylint: disable=no-member
message_id = self.ldap.search_ext(base_dn, ldap.SCOPE_SUBTREE,
search_filter, attributes,
serverctrls=[self.sprc])
#pylint: enable=no-member
data, server_controls = self.ldap.result3(message_id)[1::2]
self.sprc.cookie = server_controls[0].cookie
logging.debug('%s - Page %s results: %s', \
self.__class__.__name__, page, ', '.join(k[0] for k in data))
results += [u for u in data]
return results
|
jd/tenacity
|
tenacity/after.py
|
after_log
|
python
|
def after_log(logger, log_level, sec_format="%0.3f"):
    """After-call strategy that logs the finished attempt to *logger*.

    :param logger: A logger exposing a ``log(level, msg, *args)`` method
    :param log_level: The level to log the message at
    :param str sec_format: printf-style format for the elapsed seconds
    :return: A callable usable as an ``after`` hook
    """
    # Bake the seconds format into the message template once, up front.
    log_tpl = ("Finished call to '%s' after {0}(s), "
               "this was the %s time calling it.").format(str(sec_format))

    def log_it(retry_state):
        logger.log(log_level, log_tpl,
                   _utils.get_callback_name(retry_state.fn),
                   retry_state.seconds_since_start,
                   _utils.to_ordinal(retry_state.attempt_number))

    return log_it
|
After call strategy that logs to some logger the finished attempt.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/after.py#L24-L35
| null |
# Copyright 2016 Julien Danjou
# Copyright 2016 Joshua Harlow
# Copyright 2013-2014 Ray Holder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tenacity import _utils
def after_nothing(retry_state):
"""After call strategy that does nothing."""
|
jd/tenacity
|
tenacity/__init__.py
|
retry
|
python
|
def retry(*dargs, **dkw):
    """Wrap a function with a new `Retrying` object.

    :param dargs: positional arguments passed to the Retrying object
    :param dkw: keyword arguments passed to the Retrying object
    :return: A decorator (or, when used bare, the wrapped function)
    """
    # support both @retry and @retry() as valid syntax
    if len(dargs) == 1 and callable(dargs[0]):
        # Bare @retry usage: re-enter with no arguments and apply the
        # resulting decorator to the decorated function immediately.
        return retry()(dargs[0])
    else:
        def wrap(f):
            # Choose the Retrying implementation that matches the wrapped
            # callable: asyncio coroutine, tornado coroutine, or plain
            # synchronous function (asyncio/tornado may be None when the
            # optional import failed).
            if asyncio and asyncio.iscoroutinefunction(f):
                r = AsyncRetrying(*dargs, **dkw)
            elif tornado and hasattr(tornado.gen, 'is_coroutine_function') \
                    and tornado.gen.is_coroutine_function(f):
                r = TornadoRetrying(*dargs, **dkw)
            else:
                r = Retrying(*dargs, **dkw)
            return r.wraps(f)
        return wrap
|
Wrap a function with a new `Retrying` object.
:param dargs: positional arguments passed to Retrying object
:param dkw: keyword arguments passed to the Retrying object
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/__init__.py#L88-L109
|
[
"def retry(*dargs, **dkw):\n \"\"\"Wrap a function with a new `Retrying` object.\n\n :param dargs: positional arguments passed to Retrying object\n :param dkw: keyword arguments passed to the Retrying object\n \"\"\"\n # support both @retry and @retry() as valid syntax\n if len(dargs) == 1 and callable(dargs[0]):\n return retry()(dargs[0])\n else:\n def wrap(f):\n if asyncio and asyncio.iscoroutinefunction(f):\n r = AsyncRetrying(*dargs, **dkw)\n elif tornado and hasattr(tornado.gen, 'is_coroutine_function') \\\n and tornado.gen.is_coroutine_function(f):\n r = TornadoRetrying(*dargs, **dkw)\n else:\n r = Retrying(*dargs, **dkw)\n\n return r.wraps(f)\n\n return wrap\n",
"def wrap(f):\n if asyncio and asyncio.iscoroutinefunction(f):\n r = AsyncRetrying(*dargs, **dkw)\n elif tornado and hasattr(tornado.gen, 'is_coroutine_function') \\\n and tornado.gen.is_coroutine_function(f):\n r = TornadoRetrying(*dargs, **dkw)\n else:\n r = Retrying(*dargs, **dkw)\n\n return r.wraps(f)\n"
] |
# -*- coding: utf-8 -*-
# Copyright 2016-2018 Julien Danjou
# Copyright 2017 Elisey Zanko
# Copyright 2016 Étienne Bersac
# Copyright 2016 Joshua Harlow
# Copyright 2013-2014 Ray Holder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import asyncio
except ImportError:
asyncio = None
try:
import tornado
except ImportError:
tornado = None
import sys
import threading
from concurrent import futures
import six
from tenacity import _utils
from tenacity import compat as _compat
# Import all built-in retry strategies for easier usage.
from .retry import retry_all # noqa
from .retry import retry_always # noqa
from .retry import retry_any # noqa
from .retry import retry_if_exception # noqa
from .retry import retry_if_exception_type # noqa
from .retry import retry_if_not_result # noqa
from .retry import retry_if_result # noqa
from .retry import retry_never # noqa
from .retry import retry_unless_exception_type # noqa
from .retry import retry_if_exception_message # noqa
from .retry import retry_if_not_exception_message # noqa
# Import all nap strategies for easier usage.
from .nap import sleep # noqa
from .nap import sleep_using_event # noqa
# Import all built-in stop strategies for easier usage.
from .stop import stop_after_attempt # noqa
from .stop import stop_after_delay # noqa
from .stop import stop_all # noqa
from .stop import stop_any # noqa
from .stop import stop_never # noqa
from .stop import stop_when_event_set # noqa
# Import all built-in wait strategies for easier usage.
from .wait import wait_chain # noqa
from .wait import wait_combine # noqa
from .wait import wait_exponential # noqa
from .wait import wait_fixed # noqa
from .wait import wait_incrementing # noqa
from .wait import wait_none # noqa
from .wait import wait_random # noqa
from .wait import wait_random_exponential # noqa
from .wait import wait_random_exponential as wait_full_jitter # noqa
# Import all built-in before strategies for easier usage.
from .before import before_log # noqa
from .before import before_nothing # noqa
# Import all built-in after strategies for easier usage.
from .after import after_log # noqa
from .after import after_nothing # noqa
# Import all built-in after strategies for easier usage.
from .before_sleep import before_sleep_log # noqa
from .before_sleep import before_sleep_nothing # noqa
class TryAgain(Exception):
"""Always retry the executed function when raised."""
NO_RESULT = object()
class DoAttempt(object):
pass
class DoSleep(float):
pass
class BaseAction(object):
"""Base class for representing actions to take by retry object.
Concrete implementations must define:
- __init__: to initialize all necessary fields
- REPR_ATTRS: class variable specifying attributes to include in repr(self)
- NAME: for identification in retry object methods and callbacks
"""
REPR_FIELDS = ()
NAME = None
def __repr__(self):
state_str = ', '.join('%s=%r' % (field, getattr(self, field))
for field in self.REPR_FIELDS)
return '%s(%s)' % (type(self).__name__, state_str)
def __str__(self):
return repr(self)
class RetryAction(BaseAction):
REPR_FIELDS = ('sleep',)
NAME = 'retry'
def __init__(self, sleep):
self.sleep = float(sleep)
_unset = object()
class RetryError(Exception):
"""Encapsulates the last attempt instance right before giving up."""
def __init__(self, last_attempt):
self.last_attempt = last_attempt
def reraise(self):
if self.last_attempt.failed:
raise self.last_attempt.result()
raise self
def __str__(self):
return "{0}[{1}]".format(self.__class__.__name__, self.last_attempt)
class BaseRetrying(object):
def __init__(self,
sleep=sleep,
stop=stop_never, wait=wait_none(),
retry=retry_if_exception_type(),
before=before_nothing,
after=after_nothing,
before_sleep=None,
reraise=False,
retry_error_cls=RetryError,
retry_error_callback=None):
self.sleep = sleep
self._stop = stop
self._wait = wait
self._retry = retry
self._before = before
self._after = after
self._before_sleep = before_sleep
self.reraise = reraise
self._local = threading.local()
self.retry_error_cls = retry_error_cls
self._retry_error_callback = retry_error_callback
# This attribute was moved to RetryCallState and is deprecated on
# Retrying objects but kept for backward compatibility.
self.fn = None
@_utils.cached_property
def stop(self):
return _compat.stop_func_accept_retry_state(self._stop)
@_utils.cached_property
def wait(self):
return _compat.wait_func_accept_retry_state(self._wait)
@_utils.cached_property
def retry(self):
return _compat.retry_func_accept_retry_state(self._retry)
@_utils.cached_property
def before(self):
return _compat.before_func_accept_retry_state(self._before)
@_utils.cached_property
def after(self):
return _compat.after_func_accept_retry_state(self._after)
@_utils.cached_property
def before_sleep(self):
return _compat.before_sleep_func_accept_retry_state(self._before_sleep)
@_utils.cached_property
def retry_error_callback(self):
return _compat.retry_error_callback_accept_retry_state(
self._retry_error_callback)
def copy(self, sleep=_unset, stop=_unset, wait=_unset,
retry=_unset, before=_unset, after=_unset, before_sleep=_unset,
reraise=_unset):
"""Copy this object with some parameters changed if needed."""
if before_sleep is _unset:
before_sleep = self.before_sleep
return self.__class__(
sleep=self.sleep if sleep is _unset else sleep,
stop=self.stop if stop is _unset else stop,
wait=self.wait if wait is _unset else wait,
retry=self.retry if retry is _unset else retry,
before=self.before if before is _unset else before,
after=self.after if after is _unset else after,
before_sleep=before_sleep,
reraise=self.reraise if after is _unset else reraise,
)
def __repr__(self):
attrs = dict(
_utils.visible_attrs(self, attrs={'me': id(self)}),
__class__=self.__class__.__name__,
)
return ("<%(__class__)s object at 0x%(me)x (stop=%(stop)s, "
"wait=%(wait)s, sleep=%(sleep)s, retry=%(retry)s, "
"before=%(before)s, after=%(after)s)>") % (attrs)
@property
def statistics(self):
"""Return a dictionary of runtime statistics.
This dictionary will be empty when the controller has never been
ran. When it is running or has ran previously it should have (but
may not) have useful and/or informational keys and values when
running is underway and/or completed.
.. warning:: The keys in this dictionary **should** be some what
stable (not changing), but there existence **may**
change between major releases as new statistics are
gathered or removed so before accessing keys ensure that
they actually exist and handle when they do not.
.. note:: The values in this dictionary are local to the thread
running call (so if multiple threads share the same retrying
object - either directly or indirectly) they will each have
there own view of statistics they have collected (in the
future we may provide a way to aggregate the various
statistics from each thread).
"""
try:
return self._local.statistics
except AttributeError:
self._local.statistics = {}
return self._local.statistics
def wraps(self, f):
"""Wrap a function for retrying.
:param f: A function to wraps for retrying.
"""
@_utils.wraps(f)
def wrapped_f(*args, **kw):
return self.call(f, *args, **kw)
def retry_with(*args, **kwargs):
return self.copy(*args, **kwargs).wraps(f)
wrapped_f.retry = self
wrapped_f.retry_with = retry_with
return wrapped_f
def begin(self, fn):
self.statistics.clear()
self.statistics['start_time'] = _utils.now()
self.statistics['attempt_number'] = 1
self.statistics['idle_for'] = 0
self.fn = fn
def iter(self, retry_state): # noqa
fut = retry_state.outcome
if fut is None:
if self.before is not None:
self.before(retry_state)
return DoAttempt()
is_explicit_retry = retry_state.outcome.failed \
and isinstance(retry_state.outcome.exception(), TryAgain)
if not (is_explicit_retry or self.retry(retry_state=retry_state)):
return fut.result()
if self.after is not None:
self.after(retry_state=retry_state)
self.statistics['delay_since_first_attempt'] = \
retry_state.seconds_since_start
if self.stop(retry_state=retry_state):
if self.retry_error_callback:
return self.retry_error_callback(retry_state=retry_state)
retry_exc = self.retry_error_cls(fut)
if self.reraise:
raise retry_exc.reraise()
six.raise_from(retry_exc, fut.exception())
if self.wait:
sleep = self.wait(retry_state=retry_state)
else:
sleep = 0.0
retry_state.next_action = RetryAction(sleep)
retry_state.idle_for += sleep
self.statistics['idle_for'] += sleep
self.statistics['attempt_number'] += 1
if self.before_sleep is not None:
self.before_sleep(retry_state=retry_state)
return DoSleep(sleep)
class Retrying(BaseRetrying):
"""Retrying controller."""
def call(self, fn, *args, **kwargs):
self.begin(fn)
retry_state = RetryCallState(
retry_object=self, fn=fn, args=args, kwargs=kwargs)
while True:
do = self.iter(retry_state=retry_state)
if isinstance(do, DoAttempt):
try:
result = fn(*args, **kwargs)
except BaseException:
retry_state.set_exception(sys.exc_info())
else:
retry_state.set_result(result)
elif isinstance(do, DoSleep):
retry_state.prepare_for_next_attempt()
self.sleep(do)
else:
return do
__call__ = call
class Future(futures.Future):
"""Encapsulates a (future or past) attempted call to a target function."""
def __init__(self, attempt_number):
super(Future, self).__init__()
self.attempt_number = attempt_number
@property
def failed(self):
"""Return whether a exception is being held in this future."""
return self.exception() is not None
@classmethod
def construct(cls, attempt_number, value, has_exception):
"""Construct a new Future object."""
fut = cls(attempt_number)
if has_exception:
fut.set_exception(value)
else:
fut.set_result(value)
return fut
class RetryCallState(object):
"""State related to a single call wrapped with Retrying."""
def __init__(self, retry_object, fn, args, kwargs):
#: Retry call start timestamp
self.start_time = _utils.now()
#: Retry manager object
self.retry_object = retry_object
#: Function wrapped by this retry call
self.fn = fn
#: Arguments of the function wrapped by this retry call
self.args = args
#: Keyword arguments of the function wrapped by this retry call
self.kwargs = kwargs
#: The number of the current attempt
self.attempt_number = 1
#: Last outcome (result or exception) produced by the function
self.outcome = None
#: Timestamp of the last outcome
self.outcome_timestamp = None
#: Time spent sleeping in retries
self.idle_for = 0
#: Next action as decided by the retry manager
self.next_action = None
@property
def seconds_since_start(self):
if self.outcome_timestamp is None:
return None
return self.outcome_timestamp - self.start_time
def prepare_for_next_attempt(self):
self.outcome = None
self.outcome_timestamp = None
self.attempt_number += 1
self.next_action = None
def set_result(self, val):
ts = _utils.now()
fut = Future(self.attempt_number)
fut.set_result(val)
self.outcome, self.outcome_timestamp = fut, ts
def set_exception(self, exc_info):
ts = _utils.now()
fut = Future(self.attempt_number)
_utils.capture(fut, exc_info)
self.outcome, self.outcome_timestamp = fut, ts
if asyncio:
from tenacity._asyncio import AsyncRetrying
if tornado:
from tenacity.tornadoweb import TornadoRetrying
|
jd/tenacity
|
tenacity/__init__.py
|
BaseRetrying.copy
|
python
|
def copy(self, sleep=_unset, stop=_unset, wait=_unset,
retry=_unset, before=_unset, after=_unset, before_sleep=_unset,
reraise=_unset):
if before_sleep is _unset:
before_sleep = self.before_sleep
return self.__class__(
sleep=self.sleep if sleep is _unset else sleep,
stop=self.stop if stop is _unset else stop,
wait=self.wait if wait is _unset else wait,
retry=self.retry if retry is _unset else retry,
before=self.before if before is _unset else before,
after=self.after if after is _unset else after,
before_sleep=before_sleep,
reraise=self.reraise if after is _unset else reraise,
)
|
Copy this object with some parameters changed if needed.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/__init__.py#L231-L246
| null |
class BaseRetrying(object):
def __init__(self,
sleep=sleep,
stop=stop_never, wait=wait_none(),
retry=retry_if_exception_type(),
before=before_nothing,
after=after_nothing,
before_sleep=None,
reraise=False,
retry_error_cls=RetryError,
retry_error_callback=None):
self.sleep = sleep
self._stop = stop
self._wait = wait
self._retry = retry
self._before = before
self._after = after
self._before_sleep = before_sleep
self.reraise = reraise
self._local = threading.local()
self.retry_error_cls = retry_error_cls
self._retry_error_callback = retry_error_callback
# This attribute was moved to RetryCallState and is deprecated on
# Retrying objects but kept for backward compatibility.
self.fn = None
@_utils.cached_property
def stop(self):
return _compat.stop_func_accept_retry_state(self._stop)
@_utils.cached_property
def wait(self):
return _compat.wait_func_accept_retry_state(self._wait)
@_utils.cached_property
def retry(self):
return _compat.retry_func_accept_retry_state(self._retry)
@_utils.cached_property
def before(self):
return _compat.before_func_accept_retry_state(self._before)
@_utils.cached_property
def after(self):
return _compat.after_func_accept_retry_state(self._after)
@_utils.cached_property
def before_sleep(self):
return _compat.before_sleep_func_accept_retry_state(self._before_sleep)
@_utils.cached_property
def retry_error_callback(self):
return _compat.retry_error_callback_accept_retry_state(
self._retry_error_callback)
def __repr__(self):
attrs = dict(
_utils.visible_attrs(self, attrs={'me': id(self)}),
__class__=self.__class__.__name__,
)
return ("<%(__class__)s object at 0x%(me)x (stop=%(stop)s, "
"wait=%(wait)s, sleep=%(sleep)s, retry=%(retry)s, "
"before=%(before)s, after=%(after)s)>") % (attrs)
@property
def statistics(self):
"""Return a dictionary of runtime statistics.
This dictionary will be empty when the controller has never been
ran. When it is running or has ran previously it should have (but
may not) have useful and/or informational keys and values when
running is underway and/or completed.
.. warning:: The keys in this dictionary **should** be some what
stable (not changing), but there existence **may**
change between major releases as new statistics are
gathered or removed so before accessing keys ensure that
they actually exist and handle when they do not.
.. note:: The values in this dictionary are local to the thread
running call (so if multiple threads share the same retrying
object - either directly or indirectly) they will each have
there own view of statistics they have collected (in the
future we may provide a way to aggregate the various
statistics from each thread).
"""
try:
return self._local.statistics
except AttributeError:
self._local.statistics = {}
return self._local.statistics
def wraps(self, f):
"""Wrap a function for retrying.
:param f: A function to wraps for retrying.
"""
@_utils.wraps(f)
def wrapped_f(*args, **kw):
return self.call(f, *args, **kw)
def retry_with(*args, **kwargs):
return self.copy(*args, **kwargs).wraps(f)
wrapped_f.retry = self
wrapped_f.retry_with = retry_with
return wrapped_f
def begin(self, fn):
self.statistics.clear()
self.statistics['start_time'] = _utils.now()
self.statistics['attempt_number'] = 1
self.statistics['idle_for'] = 0
self.fn = fn
def iter(self, retry_state): # noqa
fut = retry_state.outcome
if fut is None:
if self.before is not None:
self.before(retry_state)
return DoAttempt()
is_explicit_retry = retry_state.outcome.failed \
and isinstance(retry_state.outcome.exception(), TryAgain)
if not (is_explicit_retry or self.retry(retry_state=retry_state)):
return fut.result()
if self.after is not None:
self.after(retry_state=retry_state)
self.statistics['delay_since_first_attempt'] = \
retry_state.seconds_since_start
if self.stop(retry_state=retry_state):
if self.retry_error_callback:
return self.retry_error_callback(retry_state=retry_state)
retry_exc = self.retry_error_cls(fut)
if self.reraise:
raise retry_exc.reraise()
six.raise_from(retry_exc, fut.exception())
if self.wait:
sleep = self.wait(retry_state=retry_state)
else:
sleep = 0.0
retry_state.next_action = RetryAction(sleep)
retry_state.idle_for += sleep
self.statistics['idle_for'] += sleep
self.statistics['attempt_number'] += 1
if self.before_sleep is not None:
self.before_sleep(retry_state=retry_state)
return DoSleep(sleep)
|
jd/tenacity
|
tenacity/__init__.py
|
BaseRetrying.statistics
|
python
|
def statistics(self):
try:
return self._local.statistics
except AttributeError:
self._local.statistics = {}
return self._local.statistics
|
Return a dictionary of runtime statistics.
This dictionary will be empty when the controller has never been
ran. When it is running or has ran previously it should have (but
may not) have useful and/or informational keys and values when
running is underway and/or completed.
.. warning:: The keys in this dictionary **should** be some what
stable (not changing), but there existence **may**
change between major releases as new statistics are
gathered or removed so before accessing keys ensure that
they actually exist and handle when they do not.
.. note:: The values in this dictionary are local to the thread
running call (so if multiple threads share the same retrying
object - either directly or indirectly) they will each have
there own view of statistics they have collected (in the
future we may provide a way to aggregate the various
statistics from each thread).
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/__init__.py#L258-L283
| null |
class BaseRetrying(object):
def __init__(self,
sleep=sleep,
stop=stop_never, wait=wait_none(),
retry=retry_if_exception_type(),
before=before_nothing,
after=after_nothing,
before_sleep=None,
reraise=False,
retry_error_cls=RetryError,
retry_error_callback=None):
self.sleep = sleep
self._stop = stop
self._wait = wait
self._retry = retry
self._before = before
self._after = after
self._before_sleep = before_sleep
self.reraise = reraise
self._local = threading.local()
self.retry_error_cls = retry_error_cls
self._retry_error_callback = retry_error_callback
# This attribute was moved to RetryCallState and is deprecated on
# Retrying objects but kept for backward compatibility.
self.fn = None
@_utils.cached_property
def stop(self):
return _compat.stop_func_accept_retry_state(self._stop)
@_utils.cached_property
def wait(self):
return _compat.wait_func_accept_retry_state(self._wait)
@_utils.cached_property
def retry(self):
return _compat.retry_func_accept_retry_state(self._retry)
@_utils.cached_property
def before(self):
return _compat.before_func_accept_retry_state(self._before)
@_utils.cached_property
def after(self):
return _compat.after_func_accept_retry_state(self._after)
@_utils.cached_property
def before_sleep(self):
return _compat.before_sleep_func_accept_retry_state(self._before_sleep)
@_utils.cached_property
def retry_error_callback(self):
return _compat.retry_error_callback_accept_retry_state(
self._retry_error_callback)
def copy(self, sleep=_unset, stop=_unset, wait=_unset,
retry=_unset, before=_unset, after=_unset, before_sleep=_unset,
reraise=_unset):
"""Copy this object with some parameters changed if needed."""
if before_sleep is _unset:
before_sleep = self.before_sleep
return self.__class__(
sleep=self.sleep if sleep is _unset else sleep,
stop=self.stop if stop is _unset else stop,
wait=self.wait if wait is _unset else wait,
retry=self.retry if retry is _unset else retry,
before=self.before if before is _unset else before,
after=self.after if after is _unset else after,
before_sleep=before_sleep,
reraise=self.reraise if after is _unset else reraise,
)
def __repr__(self):
attrs = dict(
_utils.visible_attrs(self, attrs={'me': id(self)}),
__class__=self.__class__.__name__,
)
return ("<%(__class__)s object at 0x%(me)x (stop=%(stop)s, "
"wait=%(wait)s, sleep=%(sleep)s, retry=%(retry)s, "
"before=%(before)s, after=%(after)s)>") % (attrs)
@property
def wraps(self, f):
"""Wrap a function for retrying.
:param f: A function to wraps for retrying.
"""
@_utils.wraps(f)
def wrapped_f(*args, **kw):
return self.call(f, *args, **kw)
def retry_with(*args, **kwargs):
return self.copy(*args, **kwargs).wraps(f)
wrapped_f.retry = self
wrapped_f.retry_with = retry_with
return wrapped_f
def begin(self, fn):
self.statistics.clear()
self.statistics['start_time'] = _utils.now()
self.statistics['attempt_number'] = 1
self.statistics['idle_for'] = 0
self.fn = fn
def iter(self, retry_state): # noqa
fut = retry_state.outcome
if fut is None:
if self.before is not None:
self.before(retry_state)
return DoAttempt()
is_explicit_retry = retry_state.outcome.failed \
and isinstance(retry_state.outcome.exception(), TryAgain)
if not (is_explicit_retry or self.retry(retry_state=retry_state)):
return fut.result()
if self.after is not None:
self.after(retry_state=retry_state)
self.statistics['delay_since_first_attempt'] = \
retry_state.seconds_since_start
if self.stop(retry_state=retry_state):
if self.retry_error_callback:
return self.retry_error_callback(retry_state=retry_state)
retry_exc = self.retry_error_cls(fut)
if self.reraise:
raise retry_exc.reraise()
six.raise_from(retry_exc, fut.exception())
if self.wait:
sleep = self.wait(retry_state=retry_state)
else:
sleep = 0.0
retry_state.next_action = RetryAction(sleep)
retry_state.idle_for += sleep
self.statistics['idle_for'] += sleep
self.statistics['attempt_number'] += 1
if self.before_sleep is not None:
self.before_sleep(retry_state=retry_state)
return DoSleep(sleep)
|
jd/tenacity
|
tenacity/__init__.py
|
BaseRetrying.wraps
|
python
|
def wraps(self, f):
@_utils.wraps(f)
def wrapped_f(*args, **kw):
return self.call(f, *args, **kw)
def retry_with(*args, **kwargs):
return self.copy(*args, **kwargs).wraps(f)
wrapped_f.retry = self
wrapped_f.retry_with = retry_with
return wrapped_f
|
Wrap a function for retrying.
:param f: A function to wraps for retrying.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/__init__.py#L285-L300
| null |
class BaseRetrying(object):
def __init__(self,
sleep=sleep,
stop=stop_never, wait=wait_none(),
retry=retry_if_exception_type(),
before=before_nothing,
after=after_nothing,
before_sleep=None,
reraise=False,
retry_error_cls=RetryError,
retry_error_callback=None):
self.sleep = sleep
self._stop = stop
self._wait = wait
self._retry = retry
self._before = before
self._after = after
self._before_sleep = before_sleep
self.reraise = reraise
self._local = threading.local()
self.retry_error_cls = retry_error_cls
self._retry_error_callback = retry_error_callback
# This attribute was moved to RetryCallState and is deprecated on
# Retrying objects but kept for backward compatibility.
self.fn = None
@_utils.cached_property
def stop(self):
return _compat.stop_func_accept_retry_state(self._stop)
@_utils.cached_property
def wait(self):
return _compat.wait_func_accept_retry_state(self._wait)
@_utils.cached_property
def retry(self):
return _compat.retry_func_accept_retry_state(self._retry)
@_utils.cached_property
def before(self):
return _compat.before_func_accept_retry_state(self._before)
@_utils.cached_property
def after(self):
return _compat.after_func_accept_retry_state(self._after)
@_utils.cached_property
def before_sleep(self):
return _compat.before_sleep_func_accept_retry_state(self._before_sleep)
@_utils.cached_property
def retry_error_callback(self):
return _compat.retry_error_callback_accept_retry_state(
self._retry_error_callback)
def copy(self, sleep=_unset, stop=_unset, wait=_unset,
retry=_unset, before=_unset, after=_unset, before_sleep=_unset,
reraise=_unset):
"""Copy this object with some parameters changed if needed."""
if before_sleep is _unset:
before_sleep = self.before_sleep
return self.__class__(
sleep=self.sleep if sleep is _unset else sleep,
stop=self.stop if stop is _unset else stop,
wait=self.wait if wait is _unset else wait,
retry=self.retry if retry is _unset else retry,
before=self.before if before is _unset else before,
after=self.after if after is _unset else after,
before_sleep=before_sleep,
reraise=self.reraise if after is _unset else reraise,
)
def __repr__(self):
attrs = dict(
_utils.visible_attrs(self, attrs={'me': id(self)}),
__class__=self.__class__.__name__,
)
return ("<%(__class__)s object at 0x%(me)x (stop=%(stop)s, "
"wait=%(wait)s, sleep=%(sleep)s, retry=%(retry)s, "
"before=%(before)s, after=%(after)s)>") % (attrs)
@property
def statistics(self):
"""Return a dictionary of runtime statistics.
This dictionary will be empty when the controller has never been
ran. When it is running or has ran previously it should have (but
may not) have useful and/or informational keys and values when
running is underway and/or completed.
.. warning:: The keys in this dictionary **should** be some what
stable (not changing), but there existence **may**
change between major releases as new statistics are
gathered or removed so before accessing keys ensure that
they actually exist and handle when they do not.
.. note:: The values in this dictionary are local to the thread
running call (so if multiple threads share the same retrying
object - either directly or indirectly) they will each have
there own view of statistics they have collected (in the
future we may provide a way to aggregate the various
statistics from each thread).
"""
try:
return self._local.statistics
except AttributeError:
self._local.statistics = {}
return self._local.statistics
def begin(self, fn):
self.statistics.clear()
self.statistics['start_time'] = _utils.now()
self.statistics['attempt_number'] = 1
self.statistics['idle_for'] = 0
self.fn = fn
def iter(self, retry_state): # noqa
fut = retry_state.outcome
if fut is None:
if self.before is not None:
self.before(retry_state)
return DoAttempt()
is_explicit_retry = retry_state.outcome.failed \
and isinstance(retry_state.outcome.exception(), TryAgain)
if not (is_explicit_retry or self.retry(retry_state=retry_state)):
return fut.result()
if self.after is not None:
self.after(retry_state=retry_state)
self.statistics['delay_since_first_attempt'] = \
retry_state.seconds_since_start
if self.stop(retry_state=retry_state):
if self.retry_error_callback:
return self.retry_error_callback(retry_state=retry_state)
retry_exc = self.retry_error_cls(fut)
if self.reraise:
raise retry_exc.reraise()
six.raise_from(retry_exc, fut.exception())
if self.wait:
sleep = self.wait(retry_state=retry_state)
else:
sleep = 0.0
retry_state.next_action = RetryAction(sleep)
retry_state.idle_for += sleep
self.statistics['idle_for'] += sleep
self.statistics['attempt_number'] += 1
if self.before_sleep is not None:
self.before_sleep(retry_state=retry_state)
return DoSleep(sleep)
|
jd/tenacity
|
tenacity/__init__.py
|
Future.construct
|
python
|
def construct(cls, attempt_number, value, has_exception):
fut = cls(attempt_number)
if has_exception:
fut.set_exception(value)
else:
fut.set_result(value)
return fut
|
Construct a new Future object.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/__init__.py#L388-L395
| null |
class Future(futures.Future):
"""Encapsulates a (future or past) attempted call to a target function."""
def __init__(self, attempt_number):
super(Future, self).__init__()
self.attempt_number = attempt_number
@property
def failed(self):
"""Return whether a exception is being held in this future."""
return self.exception() is not None
@classmethod
|
jd/tenacity
|
tenacity/before_sleep.py
|
before_sleep_log
|
python
|
def before_sleep_log(logger, log_level):
def log_it(retry_state):
if retry_state.outcome.failed:
verb, value = 'raised', retry_state.outcome.exception()
else:
verb, value = 'returned', retry_state.outcome.result()
logger.log(log_level,
"Retrying %s in %s seconds as it %s %s.",
_utils.get_callback_name(retry_state.fn),
getattr(retry_state.next_action, 'sleep'),
verb, value)
return log_it
|
Before call strategy that logs to some logger the attempt.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/before_sleep.py#L24-L37
| null |
# Copyright 2016 Julien Danjou
# Copyright 2016 Joshua Harlow
# Copyright 2013-2014 Ray Holder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tenacity import _utils
def before_sleep_nothing(retry_state):
"""Before call strategy that does nothing."""
|
jd/tenacity
|
tenacity/before.py
|
before_log
|
python
|
def before_log(logger, log_level):
def log_it(retry_state):
logger.log(log_level,
"Starting call to '%s', this is the %s time calling it.",
_utils.get_callback_name(retry_state.fn),
_utils.to_ordinal(retry_state.attempt_number))
return log_it
|
Before call strategy that logs to some logger the attempt.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/before.py#L24-L32
| null |
# Copyright 2016 Julien Danjou
# Copyright 2016 Joshua Harlow
# Copyright 2013-2014 Ray Holder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tenacity import _utils
def before_nothing(retry_state):
"""Before call strategy that does nothing."""
|
jd/tenacity
|
tenacity/_utils.py
|
get_callback_name
|
python
|
def get_callback_name(cb):
segments = []
try:
segments.append(cb.__qualname__)
except AttributeError:
try:
segments.append(cb.__name__)
if inspect.ismethod(cb):
try:
# This attribute doesn't exist on py3.x or newer, so
# we optionally ignore it... (on those versions of
# python `__qualname__` should have been found anyway).
segments.insert(0, cb.im_class.__name__)
except AttributeError:
pass
except AttributeError:
pass
if not segments:
return repr(cb)
else:
try:
# When running under sphinx it appears this can be none?
if cb.__module__:
segments.insert(0, cb.__module__)
except AttributeError:
pass
return ".".join(segments)
|
Get a callback fully-qualified name.
If no name can be produced ``repr(cb)`` is called and returned.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/_utils.py#L98-L128
| null |
# Copyright 2016 Julien Danjou
# Copyright 2016 Joshua Harlow
# Copyright 2013-2014 Ray Holder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import sys
import time
from functools import update_wrapper
import six
# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
try:
MAX_WAIT = sys.maxint / 2
except AttributeError:
MAX_WAIT = 1073741823
if six.PY2:
from functools import WRAPPER_ASSIGNMENTS, WRAPPER_UPDATES
def wraps(fn):
"""Do the same as six.wraps but only copy attributes that exist.
For example, object instances don't have __name__ attribute, so
six.wraps fails. This is fixed in Python 3
(https://bugs.python.org/issue3445), but didn't get backported to six.
Also, see https://github.com/benjaminp/six/issues/250.
"""
def filter_hasattr(obj, attrs):
return tuple(a for a in attrs if hasattr(obj, a))
return six.wraps(
fn,
assigned=filter_hasattr(fn, WRAPPER_ASSIGNMENTS),
updated=filter_hasattr(fn, WRAPPER_UPDATES))
def capture(fut, tb):
# TODO(harlowja): delete this in future, since its
# has to repeatedly calculate this crap.
fut.set_exception_info(tb[1], tb[2])
def getargspec(func):
# This was deprecated in Python 3.
return inspect.getargspec(func)
else:
from functools import wraps # noqa
def capture(fut, tb):
fut.set_exception(tb[1])
def getargspec(func):
return inspect.getfullargspec(func)
def visible_attrs(obj, attrs=None):
if attrs is None:
attrs = {}
for attr_name, attr in inspect.getmembers(obj):
if attr_name.startswith("_"):
continue
attrs[attr_name] = attr
return attrs
def find_ordinal(pos_num):
# See: https://en.wikipedia.org/wiki/English_numerals#Ordinal_numbers
if pos_num == 0:
return "th"
elif pos_num == 1:
return 'st'
elif pos_num == 2:
return 'nd'
elif pos_num == 3:
return 'rd'
elif pos_num >= 4 and pos_num <= 20:
return 'th'
else:
return find_ordinal(pos_num % 10)
def to_ordinal(pos_num):
return "%i%s" % (pos_num, find_ordinal(pos_num))
try:
now = time.monotonic # noqa
except AttributeError:
from monotonic import monotonic as now # noqa
class cached_property(object):
"""A property that is computed once per instance.
Upon being computed it replaces itself with an ordinary attribute. Deleting
the attribute resets the property.
Source: https://github.com/bottlepy/bottle/blob/1de24157e74a6971d136550afe1b63eec5b0df2b/bottle.py#L234-L246
""" # noqa: E501
def __init__(self, func):
update_wrapper(self, func)
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
|
jd/tenacity
|
tenacity/compat.py
|
make_retry_state
|
python
|
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
|
Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L57-L79
| null |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
if not six.callable(func):
raise Exception(func)
return False
if not inspect.isfunction(func) and not inspect.ismethod(func):
# func is a callable object rather than a function/method
func = func.__call__
func_spec = _utils.getargspec(func)
return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
func_takes_last_result
|
python
|
def func_takes_last_result(waiter):
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
|
Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L82-L94
|
[
"def getargspec(func):\n return inspect.getfullargspec(func)\n"
] |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
    """Return True if *func* accepts a "retry_state" parameter.

    Non-callables never take retry_state. For callable objects that are
    not plain functions/methods, the signature of ``__call__`` is
    inspected instead of the object itself.
    """
    if not six.callable(func):
        # Bug fix: the previous code raised Exception(func) here (with an
        # unreachable "return False" after it); non-callables must simply
        # report False so the *_accept_retry_state wrappers can probe them.
        return False
    if not inspect.isfunction(func) and not inspect.ismethod(func):
        # func is a callable object rather than a function/method
        func = func.__call__
    func_spec = _utils.getargspec(func)
    return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
stop_dunder_call_accept_old_params
|
python
|
def stop_dunder_call_accept_old_params(fn):
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
|
Decorate cls.__call__ method to accept old "stop" signature.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L97-L117
| null |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
    """Emit a DeprecationWarning for a callback using the old signature."""
    callback_name = _utils.get_callback_name(func)
    message = ('"%s" function must accept single "retry_state" parameter,'
               ' please update %s' % (cbname, callback_name))
    # +1 so the warning points at the caller of our caller.
    warn(message, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
    """Emit a DeprecationWarning for a dunder method called the old way."""
    message = ('"%s" method must be called with'
               ' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
    # +1 so the warning points at the caller of our caller.
    warn(message, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
    """Return True if *func* accepts a "retry_state" parameter.

    Non-callables never take retry_state. For callable objects that are
    not plain functions/methods, the signature of ``__call__`` is
    inspected instead of the object itself.
    """
    if not six.callable(func):
        # Bug fix: the previous code raised Exception(func) here (with an
        # unreachable "return False" after it); non-callables must simply
        # report False so the *_accept_retry_state wrappers can probe them.
        return False
    if not inspect.isfunction(func) and not inspect.ismethod(func):
        # func is a callable object rather than a function/method
        func = func.__call__
    func_spec = _utils.getargspec(func)
    return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
                     last_result=None):
    """Construct RetryCallState for given attempt number & delay.

    Only used in testing and thus is extra careful about timestamp
    arithmetics.

    :param previous_attempt_number: attempt count to record on the state.
    :param delay_since_first_attempt: elapsed delay to encode exactly.
    :param last_result: optional outcome to attach; when omitted a ``None``
        result is recorded via ``set_result`` instead.
    :raises TypeError: if either required value is still the ``_unset``
        sentinel (i.e. an old-style caller forgot to pass it).
    """
    required_parameter_unset = (previous_attempt_number is _unset or
                                delay_since_first_attempt is _unset)
    if required_parameter_unset:
        raise _make_unset_exception(
            'wait/stop',
            previous_attempt_number=previous_attempt_number,
            delay_since_first_attempt=delay_since_first_attempt)
    # Local import, presumably to avoid a circular import with the
    # tenacity package itself — TODO confirm.
    from tenacity import RetryCallState
    retry_state = RetryCallState(None, None, (), {})
    retry_state.attempt_number = previous_attempt_number
    if last_result is not None:
        retry_state.outcome = last_result
    else:
        retry_state.set_result(None)
    # Make outcome_timestamp - start_time exactly equal to the given delay.
    _set_delay_since_start(retry_state, delay_since_first_attempt)
    return retry_state
def func_takes_last_result(waiter):
    """Tell whether *waiter* accepts a "last_result" parameter.

    Needed to provide backward compatibility for wait functions that
    didn't take "last_result" in the beginning.
    """
    if not six.callable(waiter):
        return False
    target = waiter
    if not (inspect.isfunction(target) or inspect.ismethod(target)):
        # Callable object: inspect its __call__ rather than __init__.
        target = target.__call__
    return 'last_result' in _utils.getargspec(target).args
def stop_func_accept_retry_state(stop_func):
    """Wrap "stop" function to accept "retry_state" parameter."""
    if not six.callable(stop_func) or func_takes_retry_state(stop_func):
        # Nothing to adapt: not a callable, or already new-style.
        return stop_func

    @_utils.wraps(stop_func)
    def adapter(retry_state):
        warn_about_non_retry_state_deprecation(
            'stop', stop_func, stacklevel=4)
        # Legacy signature: (attempt_number, seconds_since_start).
        return stop_func(retry_state.attempt_number,
                         retry_state.seconds_since_start)
    return adapter
def wait_dunder_call_accept_old_params(fn):
    """Decorate cls.__call__ method to accept old "wait" signature."""
    @_utils.wraps(fn)
    def new_fn(self,
               previous_attempt_number=_unset,
               delay_since_first_attempt=_unset,
               last_result=None,
               retry_state=None):
        if retry_state is None:
            # Old-style call: reconstruct a RetryCallState from legacy args.
            from tenacity import RetryCallState
            # A RetryCallState may also have been passed positionally in
            # the first slot; detect that before warning about old usage.
            retry_state_passed_as_non_kwarg = (
                previous_attempt_number is not _unset and
                isinstance(previous_attempt_number, RetryCallState))
            if retry_state_passed_as_non_kwarg:
                retry_state = previous_attempt_number
            else:
                warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
                retry_state = make_retry_state(
                    previous_attempt_number=previous_attempt_number,
                    delay_since_first_attempt=delay_since_first_attempt,
                    last_result=last_result)
        return fn(self, retry_state=retry_state)
    return new_fn
def wait_func_accept_retry_state(wait_func):
    """Wrap wait function to accept "retry_state" parameter."""
    if not six.callable(wait_func):
        # Non-callables pass through unchanged.
        return wait_func
    if func_takes_retry_state(wait_func):
        # Already new-style; no adapter needed.
        return wait_func
    if func_takes_last_result(wait_func):
        # Legacy signature with last_result keyword.
        @_utils.wraps(wait_func)
        def wrapped_wait_func(retry_state):
            warn_about_non_retry_state_deprecation(
                'wait', wait_func, stacklevel=4)
            return wait_func(
                retry_state.attempt_number,
                retry_state.seconds_since_start,
                last_result=retry_state.outcome,
            )
    else:
        # Oldest legacy signature: (attempt_number, seconds_since_start).
        @_utils.wraps(wait_func)
        def wrapped_wait_func(retry_state):
            warn_about_non_retry_state_deprecation(
                'wait', wait_func, stacklevel=4)
            return wait_func(
                retry_state.attempt_number,
                retry_state.seconds_since_start,
            )
    return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
    """Decorate cls.__call__ method to accept old "retry" signature."""
    @_utils.wraps(fn)
    def new_fn(self, attempt=_unset, retry_state=None):
        if retry_state is None:
            from tenacity import RetryCallState
            if attempt is _unset:
                raise _make_unset_exception('retry', attempt=attempt)
            # NOTE(review): past the raise above, attempt can never be
            # _unset, so the first half of this check is always True; kept
            # for symmetry with the other *_dunder_call helpers.
            retry_state_passed_as_non_kwarg = (
                attempt is not _unset and
                isinstance(attempt, RetryCallState))
            if retry_state_passed_as_non_kwarg:
                retry_state = attempt
            else:
                warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
                retry_state = RetryCallState(None, None, (), {})
                retry_state.outcome = attempt
        return fn(self, retry_state=retry_state)
    return new_fn
def retry_func_accept_retry_state(retry_func):
    """Wrap "retry" function to accept "retry_state" parameter."""
    if not six.callable(retry_func) or func_takes_retry_state(retry_func):
        # Non-callables and new-style predicates need no adaptation.
        return retry_func

    @_utils.wraps(retry_func)
    def adapter(retry_state):
        warn_about_non_retry_state_deprecation(
            'retry', retry_func, stacklevel=4)
        # Old-style predicates received the attempt outcome directly.
        return retry_func(retry_state.outcome)
    return adapter
def before_func_accept_retry_state(fn):
    """Wrap "before" function to accept "retry_state"."""
    if not six.callable(fn) or func_takes_retry_state(fn):
        return fn

    @_utils.wraps(fn)
    def adapter(retry_state):
        warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
        # Legacy signature: (func, trial_number).
        return fn(retry_state.fn,
                  retry_state.attempt_number)
    return adapter
def after_func_accept_retry_state(fn):
    """Wrap "after" function to accept "retry_state"."""
    if not six.callable(fn) or func_takes_retry_state(fn):
        return fn

    @_utils.wraps(fn)
    def adapter(retry_state):
        warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
        # Legacy signature: (func, trial_number, trial_time_taken).
        return fn(retry_state.fn,
                  retry_state.attempt_number,
                  retry_state.seconds_since_start)
    return adapter
def before_sleep_func_accept_retry_state(fn):
    """Wrap "before_sleep" function to accept "retry_state"."""
    if not six.callable(fn) or func_takes_retry_state(fn):
        return fn

    @_utils.wraps(fn)
    def adapter(retry_state):
        warn_about_non_retry_state_deprecation(
            'before_sleep', fn, stacklevel=4)
        # Legacy signature: (retry_object, sleep, last_result).
        return fn(retry_state.retry_object,
                  sleep=getattr(retry_state.next_action, 'sleep'),
                  last_result=retry_state.outcome)
    return adapter
def retry_error_callback_accept_retry_state(fn):
    """Wrap "retry_error_callback" to accept "retry_state"."""
    if not six.callable(fn) or func_takes_retry_state(fn):
        return fn

    @_utils.wraps(fn)
    def adapter(retry_state):
        warn_about_non_retry_state_deprecation(
            'retry_error_callback', fn, stacklevel=4)
        # Old-style callbacks received the final outcome directly.
        return fn(retry_state.outcome)
    return adapter
|
jd/tenacity
|
tenacity/compat.py
|
stop_func_accept_retry_state
|
python
|
def stop_func_accept_retry_state(stop_func):
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
|
Wrap "stop" function to accept "retry_state" parameter.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L120-L136
|
[
"def func_takes_retry_state(func):\n if not six.callable(func):\n raise Exception(func)\n return False\n if not inspect.isfunction(func) and not inspect.ismethod(func):\n # func is a callable object rather than a function/method\n func = func.__call__\n func_spec = _utils.getargspec(func)\n return 'retry_state' in func_spec.args\n"
] |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
    """Return True if *func* accepts a "retry_state" parameter.

    Non-callables never take retry_state. For callable objects that are
    not plain functions/methods, the signature of ``__call__`` is
    inspected instead of the object itself.
    """
    if not six.callable(func):
        # Bug fix: the previous code raised Exception(func) here (with an
        # unreachable "return False" after it); non-callables must simply
        # report False so the *_accept_retry_state wrappers can probe them.
        return False
    if not inspect.isfunction(func) and not inspect.ismethod(func):
        # func is a callable object rather than a function/method
        func = func.__call__
    func_spec = _utils.getargspec(func)
    return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
    """Decorate cls.__call__ method to accept old "stop" signature."""
    @_utils.wraps(fn)
    def new_fn(self,
               previous_attempt_number=_unset,
               delay_since_first_attempt=_unset,
               retry_state=None):
        if retry_state is None:
            # Old-style call: reconstruct a RetryCallState from legacy args.
            from tenacity import RetryCallState
            # A RetryCallState may also have been passed positionally in
            # the first slot; detect that before warning about old usage.
            retry_state_passed_as_non_kwarg = (
                previous_attempt_number is not _unset and
                isinstance(previous_attempt_number, RetryCallState))
            if retry_state_passed_as_non_kwarg:
                retry_state = previous_attempt_number
            else:
                warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
                retry_state = make_retry_state(
                    previous_attempt_number=previous_attempt_number,
                    delay_since_first_attempt=delay_since_first_attempt)
        return fn(self, retry_state=retry_state)
    return new_fn
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
wait_func_accept_retry_state
|
python
|
def wait_func_accept_retry_state(wait_func):
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
|
Wrap wait function to accept "retry_state" parameter.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L164-L191
|
[
"def func_takes_retry_state(func):\n if not six.callable(func):\n raise Exception(func)\n return False\n if not inspect.isfunction(func) and not inspect.ismethod(func):\n # func is a callable object rather than a function/method\n func = func.__call__\n func_spec = _utils.getargspec(func)\n return 'retry_state' in func_spec.args\n",
"def func_takes_last_result(waiter):\n \"\"\"Check if function has a \"last_result\" parameter.\n\n Needed to provide backward compatibility for wait functions that didn't\n take \"last_result\" in the beginning.\n \"\"\"\n if not six.callable(waiter):\n return False\n if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):\n # waiter is a class, check dunder-call rather than dunder-init.\n waiter = waiter.__call__\n waiter_spec = _utils.getargspec(waiter)\n return 'last_result' in waiter_spec.args\n"
] |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
if not six.callable(func):
raise Exception(func)
return False
if not inspect.isfunction(func) and not inspect.ismethod(func):
# func is a callable object rather than a function/method
func = func.__call__
func_spec = _utils.getargspec(func)
return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
retry_dunder_call_accept_old_params
|
python
|
def retry_dunder_call_accept_old_params(fn):
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
|
Decorate cls.__call__ method to accept old "retry" signature.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L194-L212
| null |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
if not six.callable(func):
raise Exception(func)
return False
if not inspect.isfunction(func) and not inspect.ismethod(func):
# func is a callable object rather than a function/method
func = func.__call__
func_spec = _utils.getargspec(func)
return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
retry_func_accept_retry_state
|
python
|
def retry_func_accept_retry_state(retry_func):
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
|
Wrap "retry" function to accept "retry_state" parameter.
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L215-L228
|
[
"def func_takes_retry_state(func):\n if not six.callable(func):\n raise Exception(func)\n return False\n if not inspect.isfunction(func) and not inspect.ismethod(func):\n # func is a callable object rather than a function/method\n func = func.__call__\n func_spec = _utils.getargspec(func)\n return 'retry_state' in func_spec.args\n"
] |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
if not six.callable(func):
raise Exception(func)
return False
if not inspect.isfunction(func) and not inspect.ismethod(func):
# func is a callable object rather than a function/method
func = func.__call__
func_spec = _utils.getargspec(func)
return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
before_func_accept_retry_state
|
python
|
def before_func_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
|
Wrap "before" function to accept "retry_state".
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L231-L247
|
[
"def func_takes_retry_state(func):\n if not six.callable(func):\n raise Exception(func)\n return False\n if not inspect.isfunction(func) and not inspect.ismethod(func):\n # func is a callable object rather than a function/method\n func = func.__call__\n func_spec = _utils.getargspec(func)\n return 'retry_state' in func_spec.args\n"
] |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
if not six.callable(func):
raise Exception(func)
return False
if not inspect.isfunction(func) and not inspect.ismethod(func):
# func is a callable object rather than a function/method
func = func.__call__
func_spec = _utils.getargspec(func)
return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
after_func_accept_retry_state
|
python
|
def after_func_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
|
Wrap "after" function to accept "retry_state".
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L250-L266
|
[
"def func_takes_retry_state(func):\n if not six.callable(func):\n raise Exception(func)\n return False\n if not inspect.isfunction(func) and not inspect.ismethod(func):\n # func is a callable object rather than a function/method\n func = func.__call__\n func_spec = _utils.getargspec(func)\n return 'retry_state' in func_spec.args\n"
] |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
if not six.callable(func):
raise Exception(func)
return False
if not inspect.isfunction(func) and not inspect.ismethod(func):
# func is a callable object rather than a function/method
func = func.__call__
func_spec = _utils.getargspec(func)
return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
jd/tenacity
|
tenacity/compat.py
|
before_sleep_func_accept_retry_state
|
python
|
def before_sleep_func_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func
|
Wrap "before_sleep" function to accept "retry_state".
|
train
|
https://github.com/jd/tenacity/blob/354c40b7dc8e728c438668100dd020b65c84dfc6/tenacity/compat.py#L269-L286
|
[
"def func_takes_retry_state(func):\n if not six.callable(func):\n raise Exception(func)\n return False\n if not inspect.isfunction(func) and not inspect.ismethod(func):\n # func is a callable object rather than a function/method\n func = func.__call__\n func_spec = _utils.getargspec(func)\n return 'retry_state' in func_spec.args\n"
] |
"""Utilities for providing backward compatibility."""
import inspect
from fractions import Fraction
from warnings import warn
import six
from tenacity import _utils
def warn_about_non_retry_state_deprecation(cbname, func, stacklevel):
msg = (
'"%s" function must accept single "retry_state" parameter,'
' please update %s' % (cbname, _utils.get_callback_name(func)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def warn_about_dunder_non_retry_state_deprecation(fn, stacklevel):
msg = (
'"%s" method must be called with'
' single "retry_state" parameter' % (_utils.get_callback_name(fn)))
warn(msg, DeprecationWarning, stacklevel=stacklevel + 1)
def func_takes_retry_state(func):
if not six.callable(func):
raise Exception(func)
return False
if not inspect.isfunction(func) and not inspect.ismethod(func):
# func is a callable object rather than a function/method
func = func.__call__
func_spec = _utils.getargspec(func)
return 'retry_state' in func_spec.args
_unset = object()
def _make_unset_exception(func_name, **kwargs):
missing = []
for k, v in kwargs.iteritems():
if v is _unset:
missing.append(k)
missing_str = ', '.join(repr(s) for s in missing)
return TypeError(func_name + ' func missing parameters: ' + missing_str)
def _set_delay_since_start(retry_state, delay):
# Ensure outcome_timestamp - start_time is *exactly* equal to the delay to
# avoid complexity in test code.
retry_state.start_time = Fraction(retry_state.start_time)
retry_state.outcome_timestamp = (retry_state.start_time + Fraction(delay))
assert retry_state.seconds_since_start == delay
def make_retry_state(previous_attempt_number, delay_since_first_attempt,
last_result=None):
"""Construct RetryCallState for given attempt number & delay.
Only used in testing and thus is extra careful about timestamp arithmetics.
"""
required_parameter_unset = (previous_attempt_number is _unset or
delay_since_first_attempt is _unset)
if required_parameter_unset:
raise _make_unset_exception(
'wait/stop',
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
from tenacity import RetryCallState
retry_state = RetryCallState(None, None, (), {})
retry_state.attempt_number = previous_attempt_number
if last_result is not None:
retry_state.outcome = last_result
else:
retry_state.set_result(None)
_set_delay_since_start(retry_state, delay_since_first_attempt)
return retry_state
def func_takes_last_result(waiter):
"""Check if function has a "last_result" parameter.
Needed to provide backward compatibility for wait functions that didn't
take "last_result" in the beginning.
"""
if not six.callable(waiter):
return False
if not inspect.isfunction(waiter) and not inspect.ismethod(waiter):
# waiter is a class, check dunder-call rather than dunder-init.
waiter = waiter.__call__
waiter_spec = _utils.getargspec(waiter)
return 'last_result' in waiter_spec.args
def stop_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "stop" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt)
return fn(self, retry_state=retry_state)
return new_fn
def stop_func_accept_retry_state(stop_func):
"""Wrap "stop" function to accept "retry_state" parameter."""
if not six.callable(stop_func):
return stop_func
if func_takes_retry_state(stop_func):
return stop_func
@_utils.wraps(stop_func)
def wrapped_stop_func(retry_state):
warn_about_non_retry_state_deprecation(
'stop', stop_func, stacklevel=4)
return stop_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_stop_func
def wait_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "wait" signature."""
@_utils.wraps(fn)
def new_fn(self,
previous_attempt_number=_unset,
delay_since_first_attempt=_unset,
last_result=None,
retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
retry_state_passed_as_non_kwarg = (
previous_attempt_number is not _unset and
isinstance(previous_attempt_number, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = previous_attempt_number
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = make_retry_state(
previous_attempt_number=previous_attempt_number,
delay_since_first_attempt=delay_since_first_attempt,
last_result=last_result)
return fn(self, retry_state=retry_state)
return new_fn
def wait_func_accept_retry_state(wait_func):
"""Wrap wait function to accept "retry_state" parameter."""
if not six.callable(wait_func):
return wait_func
if func_takes_retry_state(wait_func):
return wait_func
if func_takes_last_result(wait_func):
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
last_result=retry_state.outcome,
)
else:
@_utils.wraps(wait_func)
def wrapped_wait_func(retry_state):
warn_about_non_retry_state_deprecation(
'wait', wait_func, stacklevel=4)
return wait_func(
retry_state.attempt_number,
retry_state.seconds_since_start,
)
return wrapped_wait_func
def retry_dunder_call_accept_old_params(fn):
"""Decorate cls.__call__ method to accept old "retry" signature."""
@_utils.wraps(fn)
def new_fn(self, attempt=_unset, retry_state=None):
if retry_state is None:
from tenacity import RetryCallState
if attempt is _unset:
raise _make_unset_exception('retry', attempt=attempt)
retry_state_passed_as_non_kwarg = (
attempt is not _unset and
isinstance(attempt, RetryCallState))
if retry_state_passed_as_non_kwarg:
retry_state = attempt
else:
warn_about_dunder_non_retry_state_deprecation(fn, stacklevel=2)
retry_state = RetryCallState(None, None, (), {})
retry_state.outcome = attempt
return fn(self, retry_state=retry_state)
return new_fn
def retry_func_accept_retry_state(retry_func):
"""Wrap "retry" function to accept "retry_state" parameter."""
if not six.callable(retry_func):
return retry_func
if func_takes_retry_state(retry_func):
return retry_func
@_utils.wraps(retry_func)
def wrapped_retry_func(retry_state):
warn_about_non_retry_state_deprecation(
'retry', retry_func, stacklevel=4)
return retry_func(retry_state.outcome)
return wrapped_retry_func
def before_func_accept_retry_state(fn):
"""Wrap "before" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('before', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
)
return wrapped_before_func
def after_func_accept_retry_state(fn):
"""Wrap "after" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_after_sleep_func(retry_state):
# func, trial_number, trial_time_taken
warn_about_non_retry_state_deprecation('after', fn, stacklevel=4)
return fn(
retry_state.fn,
retry_state.attempt_number,
retry_state.seconds_since_start)
return wrapped_after_sleep_func
def retry_error_callback_accept_retry_state(fn):
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_retry_error_callback(retry_state):
warn_about_non_retry_state_deprecation(
'retry_error_callback', fn, stacklevel=4)
return fn(retry_state.outcome)
return wrapped_retry_error_callback
|
markreidvfx/pyaaf2
|
aaf2/file.py
|
AAFFile.save
|
python
|
def save(self):
if self.mode in ("wb+", 'rb+'):
if not self.is_open:
raise IOError("file closed")
self.write_reference_properties()
self.manager.write_objects()
|
Writes current changes to disk and flushes modified objects in the
AAFObjectManager
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/file.py#L339-L348
|
[
"def write_reference_properties(self):\n f = self.cfb.open(\"/referenced properties\", 'w')\n byte_order = 0x4c\n path_count = len(self.weakref_table)\n pid_count = 0\n for path in self.weakref_table:\n pid_count += len(path)\n pid_count += 1 # null byte\n\n write_u8(f, byte_order)\n write_u16le(f, path_count)\n write_u32le(f, pid_count)\n for path in self.weakref_table:\n for pid in path:\n write_u16le(f, pid)\n write_u16le(f, 0) # null terminated\n"
] |
class AAFFile(object):
"""
AAF File Object. This is the entry point object for most of the API.
This object is designed to be like python's native open function.
It is recommended to create this object with the `aaf.open` alias.
It is also highly recommended to use the with statement.
For example. Opening existing AAF file readonly::
with aaf.open('/path/to/aaf_file.aaf', 'r') as f:
Opening new AAF file overwriting existing one::
with aaf.open('/path/to/aaf_file.aaf', 'w') as f:
Opening existing AAF in read and write::
with aaf.open('/path/to/aaf_file.aaf', 'rw') as f:
Opening in memory BytesIO file::
with aaf.open() as f:
"""
def __init__(self, path=None, mode='r', sector_size=4096, extensions=True, buffering=io.DEFAULT_BUFFER_SIZE):
if mode in ('r', 'rb'):
mode = 'rb'
elif mode in ('r+', 'rb+', 'rw'):
mode = 'rb+'
elif mode in ('w', 'w+', 'wb+'):
mode = 'wb+'
else:
raise ValueError("invalid mode: %s" % mode)
self.mode = mode
if path is None:
self.mode = 'wb+'
self.f = BytesIO()
else:
self.f = io.open(path, mode, buffering=buffering)
self.cfb = CompoundFileBinary(self.f, self.mode, sector_size=sector_size)
self.weakref_table = []
self.manager = AAFObjectManager(self)
self.create = AAFFactory(self)
self.is_open = True
if self.mode in ("rb", "rb+"):
self.read_reference_properties()
self.metadict = MetaDictionary(self)
self.metadict.dir = self.cfb.find('/MetaDictionary-1')
self.manager['/MetaDictionary-1'] = self.metadict
self.root = self.manager.read_object("/")
self.metadict.read_properties()
elif self.mode in ("wb+",):
self.setup_empty()
if extensions and self.writeable:
self.metadict.register_extensions()
@property
def header(self):
"""
:class:`aaf2.content.Header` object for AAF file.
"""
header_pid = 0x02
return self.root.property_entries[header_pid].value
@property
def content(self):
"""
:class:`aaf2.content.ContentStorage` object for AAF File. This has the Mob and EssenceData objects.
"""
return self.header['Content'].value
@property
def dictionary(self):
"""
:class:`aaf2.dictionary.Dictionary` for AAF file. The dictionary property has DefinitionObject objects.
"""
return self.header['Dictionary'].value
def setup_empty(self):
now = datetime.datetime.now()
self.metadict = MetaDictionary(self)
self.root = self.create.Root()
self.root.attach(self.cfb.find("/"))
self.root['MetaDictionary'].value = self.metadict
self.root['Header'].value = self.create.Header()
self.header['Dictionary'].value = self.create.Dictionary()
self.dictionary.setup_defaults()
self.header['Content'].value = self.create.ContentStorage()
self.header['OperationalPattern'].value = AUID("0d011201-0100-0000-060e-2b3404010105")
self.header['ObjectModelVersion'].value = 1
self.header['Version'].value = {u'major': 1, u'minor': 1}
i = self.create.Identification()
i['ProductName'].value = "PyAAF"
i['CompanyName'].value = "CompanyName"
i['ProductVersionString'].value = '2.0.0'
i['ProductID'].value = AUID("97e04c67-dbe6-4d11-bcd7-3a3a4253a2ef")
i['Date'].value = now
i['Platform'].value = sys.platform
i['GenerationAUID'].value = uuid4()
self.header['IdentificationList'].value = [i]
self.header['LastModified'].value = now
self.header['ByteOrder'].value = 0x4949
self.content['Mobs'].value = []
@property
def writeable(self):
return self.mode in ("wb+", "rb+")
def resovle_weakref(self, index, ref_pid, ref):
parent, p = self.weakref_prop(index)
return p[ref]
def weakref_prop(self, index):
path = self.weakref_table[index]
root = self.root
for pid in path[:-1]:
p = root.property_entries[pid]
root = p.value
p = root.property_entries[path[-1]]
return root, p
def weakref_index(self, pid_path):
if pid_path in self.weakref_table:
index = self.weakref_table.index(pid_path)
else:
index = len(self.weakref_table)
self.weakref_table.append(pid_path)
return index
def read_reference_properties(self):
s = self.cfb.open("/referenced properties")
f = io.BytesIO(s.read())
byte_order = read_u8(f)
if byte_order != 0x4c:
raise NotImplementedError("be byteorder")
path_count = read_u16le(f)
pid_count = read_u32le(f)
self.weakref_table = []
path = []
for i in range(pid_count):
pid = read_u16le(f)
if pid != 0:
path.append(pid)
else:
self.weakref_table.append(path)
path = []
assert len(self.weakref_table) == path_count
def write_reference_properties(self):
f = self.cfb.open("/referenced properties", 'w')
byte_order = 0x4c
path_count = len(self.weakref_table)
pid_count = 0
for path in self.weakref_table:
pid_count += len(path)
pid_count += 1 # null byte
write_u8(f, byte_order)
write_u16le(f, path_count)
write_u32le(f, pid_count)
for path in self.weakref_table:
for pid in path:
write_u16le(f, pid)
write_u16le(f, 0) # null terminated
def __exit__(self, exc_type, exc_value, traceback):
if (exc_type is None and exc_value is None and traceback is None):
self.close()
def __enter__(self):
return self
def dump(self):
self.root.dump()
def close(self):
"""
Close the file. A closed file cannot be read or written any more.
"""
self.save()
self.manager.remove_temp()
self.cfb.close()
self.is_open = False
self.f.close()
|
markreidvfx/pyaaf2
|
aaf2/file.py
|
AAFFile.close
|
python
|
def close(self):
self.save()
self.manager.remove_temp()
self.cfb.close()
self.is_open = False
self.f.close()
|
Close the file. A closed file cannot be read or written any more.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/file.py#L350-L358
|
[
"def save(self):\n \"\"\"\n Writes current changes to disk and flushes modified objects in the\n AAFObjectManager\n \"\"\"\n if self.mode in (\"wb+\", 'rb+'):\n if not self.is_open:\n raise IOError(\"file closed\")\n self.write_reference_properties()\n self.manager.write_objects()\n"
] |
class AAFFile(object):
"""
AAF File Object. This is the entry point object for most of the API.
This object is designed to be like python's native open function.
It is recommended to create this object with the `aaf.open` alias.
It is also highly recommended to use the with statement.
For example. Opening existing AAF file readonly::
with aaf.open('/path/to/aaf_file.aaf', 'r') as f:
Opening new AAF file overwriting existing one::
with aaf.open('/path/to/aaf_file.aaf', 'w') as f:
Opening existing AAF in read and write::
with aaf.open('/path/to/aaf_file.aaf', 'rw') as f:
Opening in memory BytesIO file::
with aaf.open() as f:
"""
def __init__(self, path=None, mode='r', sector_size=4096, extensions=True, buffering=io.DEFAULT_BUFFER_SIZE):
if mode in ('r', 'rb'):
mode = 'rb'
elif mode in ('r+', 'rb+', 'rw'):
mode = 'rb+'
elif mode in ('w', 'w+', 'wb+'):
mode = 'wb+'
else:
raise ValueError("invalid mode: %s" % mode)
self.mode = mode
if path is None:
self.mode = 'wb+'
self.f = BytesIO()
else:
self.f = io.open(path, mode, buffering=buffering)
self.cfb = CompoundFileBinary(self.f, self.mode, sector_size=sector_size)
self.weakref_table = []
self.manager = AAFObjectManager(self)
self.create = AAFFactory(self)
self.is_open = True
if self.mode in ("rb", "rb+"):
self.read_reference_properties()
self.metadict = MetaDictionary(self)
self.metadict.dir = self.cfb.find('/MetaDictionary-1')
self.manager['/MetaDictionary-1'] = self.metadict
self.root = self.manager.read_object("/")
self.metadict.read_properties()
elif self.mode in ("wb+",):
self.setup_empty()
if extensions and self.writeable:
self.metadict.register_extensions()
@property
def header(self):
"""
:class:`aaf2.content.Header` object for AAF file.
"""
header_pid = 0x02
return self.root.property_entries[header_pid].value
@property
def content(self):
"""
:class:`aaf2.content.ContentStorage` object for AAF File. This has the Mob and EssenceData objects.
"""
return self.header['Content'].value
@property
def dictionary(self):
"""
:class:`aaf2.dictionary.Dictionary` for AAF file. The dictionary property has DefinitionObject objects.
"""
return self.header['Dictionary'].value
def setup_empty(self):
now = datetime.datetime.now()
self.metadict = MetaDictionary(self)
self.root = self.create.Root()
self.root.attach(self.cfb.find("/"))
self.root['MetaDictionary'].value = self.metadict
self.root['Header'].value = self.create.Header()
self.header['Dictionary'].value = self.create.Dictionary()
self.dictionary.setup_defaults()
self.header['Content'].value = self.create.ContentStorage()
self.header['OperationalPattern'].value = AUID("0d011201-0100-0000-060e-2b3404010105")
self.header['ObjectModelVersion'].value = 1
self.header['Version'].value = {u'major': 1, u'minor': 1}
i = self.create.Identification()
i['ProductName'].value = "PyAAF"
i['CompanyName'].value = "CompanyName"
i['ProductVersionString'].value = '2.0.0'
i['ProductID'].value = AUID("97e04c67-dbe6-4d11-bcd7-3a3a4253a2ef")
i['Date'].value = now
i['Platform'].value = sys.platform
i['GenerationAUID'].value = uuid4()
self.header['IdentificationList'].value = [i]
self.header['LastModified'].value = now
self.header['ByteOrder'].value = 0x4949
self.content['Mobs'].value = []
@property
def writeable(self):
return self.mode in ("wb+", "rb+")
def resovle_weakref(self, index, ref_pid, ref):
parent, p = self.weakref_prop(index)
return p[ref]
def weakref_prop(self, index):
path = self.weakref_table[index]
root = self.root
for pid in path[:-1]:
p = root.property_entries[pid]
root = p.value
p = root.property_entries[path[-1]]
return root, p
def weakref_index(self, pid_path):
if pid_path in self.weakref_table:
index = self.weakref_table.index(pid_path)
else:
index = len(self.weakref_table)
self.weakref_table.append(pid_path)
return index
def read_reference_properties(self):
s = self.cfb.open("/referenced properties")
f = io.BytesIO(s.read())
byte_order = read_u8(f)
if byte_order != 0x4c:
raise NotImplementedError("be byteorder")
path_count = read_u16le(f)
pid_count = read_u32le(f)
self.weakref_table = []
path = []
for i in range(pid_count):
pid = read_u16le(f)
if pid != 0:
path.append(pid)
else:
self.weakref_table.append(path)
path = []
assert len(self.weakref_table) == path_count
def write_reference_properties(self):
f = self.cfb.open("/referenced properties", 'w')
byte_order = 0x4c
path_count = len(self.weakref_table)
pid_count = 0
for path in self.weakref_table:
pid_count += len(path)
pid_count += 1 # null byte
write_u8(f, byte_order)
write_u16le(f, path_count)
write_u32le(f, pid_count)
for path in self.weakref_table:
for pid in path:
write_u16le(f, pid)
write_u16le(f, 0) # null terminated
def __exit__(self, exc_type, exc_value, traceback):
if (exc_type is None and exc_value is None and traceback is None):
self.close()
def __enter__(self):
return self
def dump(self):
self.root.dump()
def save(self):
"""
Writes current changes to disk and flushes modified objects in the
AAFObjectManager
"""
if self.mode in ("wb+", 'rb+'):
if not self.is_open:
raise IOError("file closed")
self.write_reference_properties()
self.manager.write_objects()
|
markreidvfx/pyaaf2
|
docs/source/conf.py
|
run_apidoc
|
python
|
def run_apidoc(_):
import os
dirname = os.path.dirname(__file__)
ignore_paths = [os.path.join(dirname, '../../aaf2/model'),]
# https://github.com/sphinx-doc/sphinx/blob/master/sphinx/ext/apidoc.py
argv = [
'--force',
'--no-toc',
'--separate',
'--module-first',
'--output-dir',
os.path.join(dirname, 'api'),
os.path.join(dirname, '../../aaf2'),
] + ignore_paths
from sphinx.ext import apidoc
apidoc.main(argv)
|
This method is required by the setup method below.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/docs/source/conf.py#L182-L199
| null |
# -*- coding: utf-8 -*-
#
# PyAAF documentation build configuration file, created by
# sphinx-quickstart on Wed Oct 18 20:01:47 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pyaaf2'
copyright = u'2017, Mark Reid'
author = u'Mark Reid'
RELEASE = '1.0.0'
import aaf2
try:
RELEASE = aaf2.__version__
except:
pass
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {'collapse_navigation': False}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'pyaaf2doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pyaaf2.tex', u'pyaaf2 Documentation',
u'Mark Reid', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pyaaf2', u'pyaaf2 Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pyaaf2', u'pyaaf2 Documentation',
author, 'pyaaf2', 'One line description of project.',
'Miscellaneous'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
autodoc_member_order = 'bysource'
autodoc_default_flags = ['show-inheritance']
def setup(app):
"""This method is a hook into the Sphinx builder system and injects the
apidoc module into it so it runs autodoc before running build.
If you mess with this, you may not see any effect in a local build, this
was added to get api documentation building on the ReadTheDocs server.
"""
app.connect('builder-inited', run_apidoc)
|
markreidvfx/pyaaf2
|
aaf2/mobid.py
|
MobID.from_dict
|
python
|
def from_dict(self, d):
self.length = d.get("length", 0)
self.instanceHigh = d.get("instanceHigh", 0)
self.instanceMid = d.get("instanceMid", 0)
self.instanceLow = d.get("instanceLow", 0)
material = d.get("material", {'Data1':0, 'Data2':0, 'Data3':0, 'Data4': [0 for i in range(8)]})
self.Data1 = material.get('Data1', 0)
self.Data2 = material.get('Data2', 0)
self.Data3 = material.get('Data3', 0)
self.Data4 = material.get("Data4", [0 for i in range(8)])
self.SMPTELabel = d.get("SMPTELabel", [0 for i in range(12)])
|
Set MobID from a dict
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/mobid.py#L280-L296
| null |
class MobID(object):
__slots__ = ('bytes_le')
def __init__(self, mobid=None, bytes_le=None, int=None):
if bytes_le:
self.bytes_le = bytearray(bytes_le)
else:
self.bytes_le = bytearray(32)
if mobid is not None:
self.urn = mobid
if int is not None:
self.int = int
@staticmethod
def new():
"""
Static method for generating unique MobIDs. Uses uuid.uuid4() for generation.
"""
return UniqueMobID()
@property
def material(self):
"""
MobID material representation as a UUID
"""
return auid.AUID(bytes_le=self.bytes_le[16:])
@material.setter
def material(self, value):
self.bytes_le[16:] = value.bytes_le
@property
def SMPTELabel(self):
return self.bytes_le[0:12]
@SMPTELabel.setter
def SMPTELabel(self, value):
struct.pack_into(str('12B'), self.bytes_le, 0, *value)
@property
def length(self):
return self.bytes_le[12]
@length.setter
def length(self, value):
self.bytes_le[12] = value
@property
def instanceHigh(self):
return self.bytes_le[13]
@instanceHigh.setter
def instanceHigh(self, value):
self.bytes_le[13] = value
@property
def instanceMid(self):
return self.bytes_le[14]
@instanceMid.setter
def instanceMid(self, value):
self.bytes_le[14] = value
@property
def instanceLow(self):
return self.bytes_le[15]
@instanceLow.setter
def instanceLow(self, value):
self.bytes_le[15] = value
@property
def Data1(self):
return unpack_u32le_from(self.bytes_le, 16)
@Data1.setter
def Data1(self, value):
struct.pack_into(str('<I'), self.bytes_le, 16, value)
@property
def Data2(self):
return unpack_u16le_from(self.bytes_le, 20)
@Data2.setter
def Data2(self, value):
struct.pack_into(str('<H'), self.bytes_le, 20, value)
@property
def Data3(self):
return unpack_u16le_from(self.bytes_le, 22)
@Data3.setter
def Data3(self, value):
struct.pack_into(str('<H'), self.bytes_le, 22, value)
@property
def Data4(self):
return self.bytes_le[24:32]
@Data4.setter
def Data4(self, value):
struct.pack_into(str('8B'), self.bytes_le, 24, *value)
def to_dict(self):
"""
MobID representation as dict
"""
material = {'Data1': self.Data1,
'Data2': self.Data2,
'Data3': self.Data3,
'Data4': list(self.Data4)
}
return {'material':material,
'length': self.length,
'instanceHigh': self.instanceHigh,
'instanceMid': self.instanceMid,
'instanceLow': self.instanceLow,
'SMPTELabel': list(self.SMPTELabel)
}
@property
def int(self):
"""
MobID representation as a int
"""
return int_from_bytes(self.bytes_le, byte_order='big')
@int.setter
def int(self, value):
# NOTE: interpreted as big endian
self.bytes_le = bytearray(bytes_from_int(value, 32, byte_order='big'))
def __int__(self):
return self.int
def __eq__(self, other):
if isinstance(other, MobID):
return self.bytes_le == other.bytes_le
return NotImplemented
def __lt__(self, other):
if isinstance(other, MobID):
return self.int < other.int
return NotImplemented
def __le__(self, other):
if isinstance(other, MobID):
return self.int <= other.int
return NotImplemented
def __gt__(self, other):
if isinstance(other, MobID):
return self.int > other.int
return NotImplemented
def __ge__(self, other):
if isinstance(other, MobID):
return self.int >= other.int
return NotImplemented
def __hash__(self):
return hash(bytes(self.bytes_le))
@property
def urn(self):
"""
MobID Uniform Resource Name representation.
https://en.wikipedia.org/wiki/Uniform_Resource_Name
"""
SMPTELabel = self.SMPTELabel
Data4 = self.Data4
# handle case UMIDs where the material number is half swapped
if (SMPTELabel[11] == 0x00 and
Data4[0] == 0x06 and Data4[1] == 0x0E and
Data4[2] == 0x2B and Data4[3] == 0x34 and
Data4[4] == 0x7F and Data4[5] == 0x7F):
# print("case 1")
f = "urn:smpte:umid:%02x%02x%02x%02x.%02x%02x%02x%02x.%02x%02x%02x%02x." + \
"%02x" + \
"%02x%02x%02x." + \
"%02x%02x%02x%02x.%02x%02x%02x%02x.%08x.%04x%04x"
return f % (
SMPTELabel[0], SMPTELabel[1], SMPTELabel[2], SMPTELabel[3],
SMPTELabel[4], SMPTELabel[5], SMPTELabel[6], SMPTELabel[7],
SMPTELabel[8], SMPTELabel[9], SMPTELabel[10], SMPTELabel[11],
self.length,
self.instanceHigh, self.instanceMid, self.instanceLow,
Data4[0], Data4[1], Data4[2], Data4[3],
Data4[4], Data4[5], Data4[6], Data4[7],
self.Data1, self.Data2, self.Data3)
else:
# print("case 2")
f = "urn:smpte:umid:%02x%02x%02x%02x.%02x%02x%02x%02x.%02x%02x%02x%02x." + \
"%02x" + \
"%02x%02x%02x." + \
"%08x.%04x%04x.%02x%02x%02x%02x.%02x%02x%02x%02x"
return f % (
SMPTELabel[0], SMPTELabel[1], SMPTELabel[2], SMPTELabel[3],
SMPTELabel[4], SMPTELabel[5], SMPTELabel[6], SMPTELabel[7],
SMPTELabel[8], SMPTELabel[9], SMPTELabel[10], SMPTELabel[11],
self.length,
self.instanceHigh, self.instanceMid, self.instanceLow,
self.Data1, self.Data2, self.Data3,
Data4[0], Data4[1], Data4[2], Data4[3],
Data4[4], Data4[5], Data4[6], Data4[7])
@urn.setter
def urn(self, value):
s = str(value).lower()
for item in ("urn:smpte:umid:", ".", '-', '0x'):
s = s.replace(item, '')
assert len(s) == 64
SMPTELabel = [0 for i in range(12)]
start = 0
for i in range(12):
end = start + 2
v = s[start:end]
SMPTELabel[i] = int(v, 16)
start = end
self.SMPTELabel = SMPTELabel
self.length = int(s[24:26], 16)
self.instanceHigh = int(s[26:28], 16)
self.instanceMid = int(s[28:30], 16)
self.instanceLow = int(s[30:32], 16)
start = 32
data = [0 for i in range(6)]
for i in range(6):
end = start + 2
v = s[start:end]
data[i] = int(v, 16)
start = end
# print(s[32:start])
if (SMPTELabel[11] == 0x00 and
data[0] == 0x06 and data[1] == 0x0E and
data[2] == 0x2B and data[3] == 0x34 and
data[4] == 0x7F and data[5] == 0x7F):
start = 32
data4 = [0 for i in range(8)]
for i in range(8):
end = start + 2
v = s[start:end]
data4[i] = int(v, 16)
start = end
self.Data4 = data4
self.Data1 = int(s[48:56], 16)
self.Data2 = int(s[56:60], 16)
self.Data3 = int(s[60:64], 16)
else:
self.Data1 = int(s[32:40], 16)
self.Data2 = int(s[40:44], 16)
self.Data3 = int(s[44:48], 16)
start = 48
data4 = [0 for i in range(8)]
for i in range(8):
end = start + 2
v = s[start:end]
data4[i] = int(v, 16)
start = end
self.Data4 = data4
def __repr__(self):
return str(self.urn)
|
markreidvfx/pyaaf2
|
aaf2/mobid.py
|
MobID.to_dict
|
python
|
def to_dict(self):
material = {'Data1': self.Data1,
'Data2': self.Data2,
'Data3': self.Data3,
'Data4': list(self.Data4)
}
return {'material':material,
'length': self.length,
'instanceHigh': self.instanceHigh,
'instanceMid': self.instanceMid,
'instanceLow': self.instanceLow,
'SMPTELabel': list(self.SMPTELabel)
}
|
MobID representation as dict
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/mobid.py#L298-L315
| null |
class MobID(object):
__slots__ = ('bytes_le')
def __init__(self, mobid=None, bytes_le=None, int=None):
if bytes_le:
self.bytes_le = bytearray(bytes_le)
else:
self.bytes_le = bytearray(32)
if mobid is not None:
self.urn = mobid
if int is not None:
self.int = int
@staticmethod
def new():
"""
Static method for generating unique MobIDs. Uses uuid.uuid4() for generation.
"""
return UniqueMobID()
@property
def material(self):
"""
MobID material representation as a UUID
"""
return auid.AUID(bytes_le=self.bytes_le[16:])
@material.setter
def material(self, value):
self.bytes_le[16:] = value.bytes_le
@property
def SMPTELabel(self):
return self.bytes_le[0:12]
@SMPTELabel.setter
def SMPTELabel(self, value):
struct.pack_into(str('12B'), self.bytes_le, 0, *value)
@property
def length(self):
return self.bytes_le[12]
@length.setter
def length(self, value):
self.bytes_le[12] = value
@property
def instanceHigh(self):
return self.bytes_le[13]
@instanceHigh.setter
def instanceHigh(self, value):
self.bytes_le[13] = value
@property
def instanceMid(self):
return self.bytes_le[14]
@instanceMid.setter
def instanceMid(self, value):
self.bytes_le[14] = value
@property
def instanceLow(self):
return self.bytes_le[15]
@instanceLow.setter
def instanceLow(self, value):
self.bytes_le[15] = value
@property
def Data1(self):
return unpack_u32le_from(self.bytes_le, 16)
@Data1.setter
def Data1(self, value):
struct.pack_into(str('<I'), self.bytes_le, 16, value)
@property
def Data2(self):
return unpack_u16le_from(self.bytes_le, 20)
@Data2.setter
def Data2(self, value):
struct.pack_into(str('<H'), self.bytes_le, 20, value)
@property
def Data3(self):
return unpack_u16le_from(self.bytes_le, 22)
@Data3.setter
def Data3(self, value):
struct.pack_into(str('<H'), self.bytes_le, 22, value)
@property
def Data4(self):
return self.bytes_le[24:32]
@Data4.setter
def Data4(self, value):
struct.pack_into(str('8B'), self.bytes_le, 24, *value)
def from_dict(self, d):
"""
Set MobID from a dict
"""
self.length = d.get("length", 0)
self.instanceHigh = d.get("instanceHigh", 0)
self.instanceMid = d.get("instanceMid", 0)
self.instanceLow = d.get("instanceLow", 0)
material = d.get("material", {'Data1':0, 'Data2':0, 'Data3':0, 'Data4': [0 for i in range(8)]})
self.Data1 = material.get('Data1', 0)
self.Data2 = material.get('Data2', 0)
self.Data3 = material.get('Data3', 0)
self.Data4 = material.get("Data4", [0 for i in range(8)])
self.SMPTELabel = d.get("SMPTELabel", [0 for i in range(12)])
@property
def int(self):
"""
MobID representation as a int
"""
return int_from_bytes(self.bytes_le, byte_order='big')
@int.setter
def int(self, value):
# NOTE: interpreted as big endian
self.bytes_le = bytearray(bytes_from_int(value, 32, byte_order='big'))
def __int__(self):
return self.int
def __eq__(self, other):
if isinstance(other, MobID):
return self.bytes_le == other.bytes_le
return NotImplemented
def __lt__(self, other):
if isinstance(other, MobID):
return self.int < other.int
return NotImplemented
def __le__(self, other):
if isinstance(other, MobID):
return self.int <= other.int
return NotImplemented
def __gt__(self, other):
if isinstance(other, MobID):
return self.int > other.int
return NotImplemented
def __ge__(self, other):
if isinstance(other, MobID):
return self.int >= other.int
return NotImplemented
def __hash__(self):
return hash(bytes(self.bytes_le))
@property
def urn(self):
"""
MobID Uniform Resource Name representation.
https://en.wikipedia.org/wiki/Uniform_Resource_Name
"""
SMPTELabel = self.SMPTELabel
Data4 = self.Data4
# handle case UMIDs where the material number is half swapped
if (SMPTELabel[11] == 0x00 and
Data4[0] == 0x06 and Data4[1] == 0x0E and
Data4[2] == 0x2B and Data4[3] == 0x34 and
Data4[4] == 0x7F and Data4[5] == 0x7F):
# print("case 1")
f = "urn:smpte:umid:%02x%02x%02x%02x.%02x%02x%02x%02x.%02x%02x%02x%02x." + \
"%02x" + \
"%02x%02x%02x." + \
"%02x%02x%02x%02x.%02x%02x%02x%02x.%08x.%04x%04x"
return f % (
SMPTELabel[0], SMPTELabel[1], SMPTELabel[2], SMPTELabel[3],
SMPTELabel[4], SMPTELabel[5], SMPTELabel[6], SMPTELabel[7],
SMPTELabel[8], SMPTELabel[9], SMPTELabel[10], SMPTELabel[11],
self.length,
self.instanceHigh, self.instanceMid, self.instanceLow,
Data4[0], Data4[1], Data4[2], Data4[3],
Data4[4], Data4[5], Data4[6], Data4[7],
self.Data1, self.Data2, self.Data3)
else:
# print("case 2")
f = "urn:smpte:umid:%02x%02x%02x%02x.%02x%02x%02x%02x.%02x%02x%02x%02x." + \
"%02x" + \
"%02x%02x%02x." + \
"%08x.%04x%04x.%02x%02x%02x%02x.%02x%02x%02x%02x"
return f % (
SMPTELabel[0], SMPTELabel[1], SMPTELabel[2], SMPTELabel[3],
SMPTELabel[4], SMPTELabel[5], SMPTELabel[6], SMPTELabel[7],
SMPTELabel[8], SMPTELabel[9], SMPTELabel[10], SMPTELabel[11],
self.length,
self.instanceHigh, self.instanceMid, self.instanceLow,
self.Data1, self.Data2, self.Data3,
Data4[0], Data4[1], Data4[2], Data4[3],
Data4[4], Data4[5], Data4[6], Data4[7])
@urn.setter
def urn(self, value):
s = str(value).lower()
for item in ("urn:smpte:umid:", ".", '-', '0x'):
s = s.replace(item, '')
assert len(s) == 64
SMPTELabel = [0 for i in range(12)]
start = 0
for i in range(12):
end = start + 2
v = s[start:end]
SMPTELabel[i] = int(v, 16)
start = end
self.SMPTELabel = SMPTELabel
self.length = int(s[24:26], 16)
self.instanceHigh = int(s[26:28], 16)
self.instanceMid = int(s[28:30], 16)
self.instanceLow = int(s[30:32], 16)
start = 32
data = [0 for i in range(6)]
for i in range(6):
end = start + 2
v = s[start:end]
data[i] = int(v, 16)
start = end
# print(s[32:start])
if (SMPTELabel[11] == 0x00 and
data[0] == 0x06 and data[1] == 0x0E and
data[2] == 0x2B and data[3] == 0x34 and
data[4] == 0x7F and data[5] == 0x7F):
start = 32
data4 = [0 for i in range(8)]
for i in range(8):
end = start + 2
v = s[start:end]
data4[i] = int(v, 16)
start = end
self.Data4 = data4
self.Data1 = int(s[48:56], 16)
self.Data2 = int(s[56:60], 16)
self.Data3 = int(s[60:64], 16)
else:
self.Data1 = int(s[32:40], 16)
self.Data2 = int(s[40:44], 16)
self.Data3 = int(s[44:48], 16)
start = 48
data4 = [0 for i in range(8)]
for i in range(8):
end = start + 2
v = s[start:end]
data4[i] = int(v, 16)
start = end
self.Data4 = data4
def __repr__(self):
return str(self.urn)
|
markreidvfx/pyaaf2
|
aaf2/mobid.py
|
MobID.urn
|
python
|
def urn(self):
SMPTELabel = self.SMPTELabel
Data4 = self.Data4
# handle case UMIDs where the material number is half swapped
if (SMPTELabel[11] == 0x00 and
Data4[0] == 0x06 and Data4[1] == 0x0E and
Data4[2] == 0x2B and Data4[3] == 0x34 and
Data4[4] == 0x7F and Data4[5] == 0x7F):
# print("case 1")
f = "urn:smpte:umid:%02x%02x%02x%02x.%02x%02x%02x%02x.%02x%02x%02x%02x." + \
"%02x" + \
"%02x%02x%02x." + \
"%02x%02x%02x%02x.%02x%02x%02x%02x.%08x.%04x%04x"
return f % (
SMPTELabel[0], SMPTELabel[1], SMPTELabel[2], SMPTELabel[3],
SMPTELabel[4], SMPTELabel[5], SMPTELabel[6], SMPTELabel[7],
SMPTELabel[8], SMPTELabel[9], SMPTELabel[10], SMPTELabel[11],
self.length,
self.instanceHigh, self.instanceMid, self.instanceLow,
Data4[0], Data4[1], Data4[2], Data4[3],
Data4[4], Data4[5], Data4[6], Data4[7],
self.Data1, self.Data2, self.Data3)
else:
# print("case 2")
f = "urn:smpte:umid:%02x%02x%02x%02x.%02x%02x%02x%02x.%02x%02x%02x%02x." + \
"%02x" + \
"%02x%02x%02x." + \
"%08x.%04x%04x.%02x%02x%02x%02x.%02x%02x%02x%02x"
return f % (
SMPTELabel[0], SMPTELabel[1], SMPTELabel[2], SMPTELabel[3],
SMPTELabel[4], SMPTELabel[5], SMPTELabel[6], SMPTELabel[7],
SMPTELabel[8], SMPTELabel[9], SMPTELabel[10], SMPTELabel[11],
self.length,
self.instanceHigh, self.instanceMid, self.instanceLow,
self.Data1, self.Data2, self.Data3,
Data4[0], Data4[1], Data4[2], Data4[3],
Data4[4], Data4[5], Data4[6], Data4[7])
|
MobID Uniform Resource Name representation.
https://en.wikipedia.org/wiki/Uniform_Resource_Name
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/mobid.py#L360-L405
| null |
class MobID(object):
__slots__ = ('bytes_le')
def __init__(self, mobid=None, bytes_le=None, int=None):
if bytes_le:
self.bytes_le = bytearray(bytes_le)
else:
self.bytes_le = bytearray(32)
if mobid is not None:
self.urn = mobid
if int is not None:
self.int = int
@staticmethod
def new():
"""
Static method for generating unique MobIDs. Uses uuid.uuid4() for generation.
"""
return UniqueMobID()
@property
def material(self):
"""
MobID material representation as a UUID
"""
return auid.AUID(bytes_le=self.bytes_le[16:])
@material.setter
def material(self, value):
self.bytes_le[16:] = value.bytes_le
@property
def SMPTELabel(self):
return self.bytes_le[0:12]
@SMPTELabel.setter
def SMPTELabel(self, value):
struct.pack_into(str('12B'), self.bytes_le, 0, *value)
@property
def length(self):
return self.bytes_le[12]
@length.setter
def length(self, value):
self.bytes_le[12] = value
@property
def instanceHigh(self):
return self.bytes_le[13]
@instanceHigh.setter
def instanceHigh(self, value):
self.bytes_le[13] = value
@property
def instanceMid(self):
return self.bytes_le[14]
@instanceMid.setter
def instanceMid(self, value):
self.bytes_le[14] = value
@property
def instanceLow(self):
return self.bytes_le[15]
@instanceLow.setter
def instanceLow(self, value):
self.bytes_le[15] = value
@property
def Data1(self):
return unpack_u32le_from(self.bytes_le, 16)
@Data1.setter
def Data1(self, value):
struct.pack_into(str('<I'), self.bytes_le, 16, value)
@property
def Data2(self):
return unpack_u16le_from(self.bytes_le, 20)
@Data2.setter
def Data2(self, value):
struct.pack_into(str('<H'), self.bytes_le, 20, value)
@property
def Data3(self):
return unpack_u16le_from(self.bytes_le, 22)
@Data3.setter
def Data3(self, value):
struct.pack_into(str('<H'), self.bytes_le, 22, value)
@property
def Data4(self):
return self.bytes_le[24:32]
@Data4.setter
def Data4(self, value):
struct.pack_into(str('8B'), self.bytes_le, 24, *value)
def from_dict(self, d):
"""
Set MobID from a dict
"""
self.length = d.get("length", 0)
self.instanceHigh = d.get("instanceHigh", 0)
self.instanceMid = d.get("instanceMid", 0)
self.instanceLow = d.get("instanceLow", 0)
material = d.get("material", {'Data1':0, 'Data2':0, 'Data3':0, 'Data4': [0 for i in range(8)]})
self.Data1 = material.get('Data1', 0)
self.Data2 = material.get('Data2', 0)
self.Data3 = material.get('Data3', 0)
self.Data4 = material.get("Data4", [0 for i in range(8)])
self.SMPTELabel = d.get("SMPTELabel", [0 for i in range(12)])
def to_dict(self):
"""
MobID representation as dict
"""
material = {'Data1': self.Data1,
'Data2': self.Data2,
'Data3': self.Data3,
'Data4': list(self.Data4)
}
return {'material':material,
'length': self.length,
'instanceHigh': self.instanceHigh,
'instanceMid': self.instanceMid,
'instanceLow': self.instanceLow,
'SMPTELabel': list(self.SMPTELabel)
}
@property
def int(self):
"""
MobID representation as a int
"""
return int_from_bytes(self.bytes_le, byte_order='big')
@int.setter
def int(self, value):
# NOTE: interpreted as big endian
self.bytes_le = bytearray(bytes_from_int(value, 32, byte_order='big'))
def __int__(self):
return self.int
def __eq__(self, other):
if isinstance(other, MobID):
return self.bytes_le == other.bytes_le
return NotImplemented
def __lt__(self, other):
if isinstance(other, MobID):
return self.int < other.int
return NotImplemented
def __le__(self, other):
if isinstance(other, MobID):
return self.int <= other.int
return NotImplemented
def __gt__(self, other):
if isinstance(other, MobID):
return self.int > other.int
return NotImplemented
def __ge__(self, other):
if isinstance(other, MobID):
return self.int >= other.int
return NotImplemented
def __hash__(self):
return hash(bytes(self.bytes_le))
@property
@urn.setter
def urn(self, value):
s = str(value).lower()
for item in ("urn:smpte:umid:", ".", '-', '0x'):
s = s.replace(item, '')
assert len(s) == 64
SMPTELabel = [0 for i in range(12)]
start = 0
for i in range(12):
end = start + 2
v = s[start:end]
SMPTELabel[i] = int(v, 16)
start = end
self.SMPTELabel = SMPTELabel
self.length = int(s[24:26], 16)
self.instanceHigh = int(s[26:28], 16)
self.instanceMid = int(s[28:30], 16)
self.instanceLow = int(s[30:32], 16)
start = 32
data = [0 for i in range(6)]
for i in range(6):
end = start + 2
v = s[start:end]
data[i] = int(v, 16)
start = end
# print(s[32:start])
if (SMPTELabel[11] == 0x00 and
data[0] == 0x06 and data[1] == 0x0E and
data[2] == 0x2B and data[3] == 0x34 and
data[4] == 0x7F and data[5] == 0x7F):
start = 32
data4 = [0 for i in range(8)]
for i in range(8):
end = start + 2
v = s[start:end]
data4[i] = int(v, 16)
start = end
self.Data4 = data4
self.Data1 = int(s[48:56], 16)
self.Data2 = int(s[56:60], 16)
self.Data3 = int(s[60:64], 16)
else:
self.Data1 = int(s[32:40], 16)
self.Data2 = int(s[40:44], 16)
self.Data3 = int(s[44:48], 16)
start = 48
data4 = [0 for i in range(8)]
for i in range(8):
end = start + 2
v = s[start:end]
data4[i] = int(v, 16)
start = end
self.Data4 = data4
def __repr__(self):
return str(self.urn)
|
markreidvfx/pyaaf2
|
aaf2/ama.py
|
wave_infochunk
|
python
|
def wave_infochunk(path):
with open(path,'rb') as file:
if file.read(4) != b"RIFF":
return None
data_size = file.read(4) # container size
if file.read(4) != b"WAVE":
return None
while True:
chunkid = file.read(4)
sizebuf = file.read(4)
if len(sizebuf) < 4 or len(chunkid) < 4:
return None
size = struct.unpack(b'<L', sizebuf )[0]
if chunkid[0:3] != b"fmt":
if size % 2 == 1:
seek = size + 1
else:
seek = size
file.seek(size,1)
else:
return bytearray(b"RIFF" + data_size + b"WAVE" + chunkid + sizebuf + file.read(size))
|
Returns a bytearray of the WAVE RIFF header and fmt
chunk for a `WAVEDescriptor` `Summary`
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/ama.py#L329-L353
| null |
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
import os
import sys
from .rational import AAFRational
from . import video
from . import audio
from . import mxf
from .auid import AUID
import struct
MediaContainerGUID = {
"Generic" : (AUID("b22697a2-3442-44e8-bb8f-7a1cd290ebf1"),
('.3g2', '.3gp', '.aac', '.au', '.avi', '.bmp', '.dv', '.gif',
'.jfif', '.jpeg', '.jpg', '.m4a', '.mid', '.moov', '.mov',
'.movie', '.mp2', '.mp3', '.mp4', '.mpa', '.mpe', '.mpeg',
'.mpg', '.png', '.psd', '.qt', '.tif', '.tiff',)),
"AVCHD" : (AUID("f37d624b307d4ef59bebc539046cad54"),
('.mts', '.m2ts',)),
"ImageSequencer" : (AUID("4964178d-b3d5-485f-8e98-beb89d92a5f4"),
('.dpx',)),
"CanonRaw" : (AUID("0f299461-ee19-459f-8ae6-93e65c76a892"),
('.rmf',)),
"WaveAiff" : (AUID("3711d3cc-62d0-49d7-b0ae-c118101d1a16"),
('.wav', '.wave', '.bwf', '.aif', '.aiff', '.aifc', '.cdda',)),
"MXF" : (AUID("60eb8921-2a02-4406-891c-d9b6a6ae0645"),
('.mxf',)),
"QuickTime" : (AUID("781f84b7-b989-4534-8a07-c595cb9a6fb8"),
('.mov', '.mp4', '.m4v', '.mpg', '.mpe', '.mpeg', '.3gp', '.3g2',
'.qt', '.moov', '.movie', '.avi', '.mp2', '.mp3', '.m4a', '.wav',
'.aiff', '.aif', '.au', '.aac', '.mid', '.mpa', '.gif', '.jpg',
'.jpeg', '.jfif', '.tif', '.tiff', '.png', '.bmp', '.psd', '.dv')),
}
def pixel_sizes(pix_fmt):
h_samp = 2
v_samp = 2
depth = 8
if pix_fmt.count('420'):
h_samp = 2
v_samp = 2
elif pix_fmt.count('422'):
h_samp = 2
v_samp = 1
elif pix_fmt.count('444'):
h_samp = 1
v_samp = 1
for i in [8, 10, 12, 16]:
if pix_fmt.count("p%d" % i):
depth = i
break
return (depth, h_samp, v_samp)
def get_avc_compression(meta):
profile = meta.get('profile', None)
key = 'CompressedPicture'
if profile == "Baseline":
key = 'AVCBaselineUnconstrained'
elif profile == "Constrained Baseline":
key = 'AVCConstrainedBaselineUnconstrained'
elif profile == "Main":
key = 'AVCMainUnconstrained'
elif profile == "Extended":
key ='AVCExtendedUnconstrained'
elif profile == "High":
key = 'AVCHighUnconstrained'
elif profile == "High 10":
key = 'AVCHigh10Unconstrained'
elif profile == "High 10 Intra":
key = 'AVCHigh10IntraUnconstrained'
elif profile == "High 4:2:2":
key = 'AVCHigh422Unconstrained'
elif profile == "High 4:2:2 Intra":
key = 'AVCHigh422IntraUnconstrained'
elif profile == "High 4:4:4":
# key = 'AVCHigh444IntraUnconstrained'
key = 'CompressedPicture'
elif profile == "High 4:4:4 Predictive":
# key = 'AVCHigh444PredictiveUnconstrained'
key = 'CompressedPicture'
elif profile == "High 4:4:4 Intra":
# key = 'AVCHigh444IntraUnconstrained'
key = 'CompressedPicture'
elif profile == 'CAVLC 4:4:4':
# key = 'AVCCAVLC444IntraUnconstrained'
key = 'CompressedPicture'
return video.compression_ids[key]
def get_compression(meta):
codec_name = meta.get('codec_name', None)
if codec_name == 'mjpeg':
return video.compression_ids['mjpeg']
if codec_name == 'h264':
return get_avc_compression(meta)
return video.compression_ids['CompressedPicture']
def create_video_descriptor(f, meta):
d = f.create.CDCIDescriptor()
depth, h_samp, v_samp = pixel_sizes(meta['pix_fmt'])
width = meta['width']
height = meta['height']
# aspect_ratio = meta.get('display_aspect_ratio', "").replace(":", '/')
aspect_ratio = "%d/%d" % (width, height)
d['ComponentWidth'].value = depth
d['HorizontalSubsampling'].value = h_samp
d['VerticalSubsampling'].value = v_samp
d['FrameLayout'].value = 'FullFrame'
d['VideoLineMap'].value = [0,0]
# d['VideoLineMap'].value = [42, 0]
d['ImageAspectRatio'].value = aspect_ratio
d['StoredWidth'].value = width
d['StoredHeight'].value = height
d['SampleRate'].value = meta['avg_frame_rate']
compression = get_compression(meta)
d['Compression'].value = compression
# d['ResolutionID'].value = 2900
# d['Compression'].value = AUID('04010202-0000-0000-060e-2b3404010101')
d['Length'].value = int(meta['nb_frames'])
return d
def create_audio_descriptor(f, meta):
d = f.create.PCMDescriptor()
rate = meta['sample_rate']
d['SampleRate'].value = rate
d['AudioSamplingRate'].value = rate
d['Channels'].value = meta['channels']
d['AverageBPS'].value = int(meta['bit_rate'])
bit_depth, block_align = audio.audio_format_sizes.get(meta['sample_fmt'], (0,0))
d['QuantizationBits'].value = bit_depth
d['BlockAlign'].value = block_align
duration = float(meta['duration'])
d['Length'].value = int(duration * float(rate))
d['Compression'].value = AUID('04020202-0000-0000-060e-2b3404010101')
return d
def create_network_locator(f, absolute_path):
n = f.create.NetworkLocator()
if sys.version_info[0] < 3:
import urllib
n['URLString'].value = 'file://' + urllib.pathname2url(absolute_path)
else:
import pathlib
n['URLString'].value = pathlib.Path(absolute_path).as_uri()
return n
def guess_edit_rate(metadata):
for st in metadata['streams']:
codec_type = st['codec_type']
if codec_type == 'video':
return AAFRational(st['avg_frame_rate'])
elif codec_type == 'audio':
return AAFRational(st['sample_rate'])
def guess_length(metadata, edit_rate):
for st in metadata['streams']:
codec_type = st['codec_type']
if codec_type == 'video':
return int(st['nb_frames'])
def get_container_guid(metadata):
for st in metadata['streams']:
codec_name = st['codec_name']
if codec_name in ('prores', ):
return MediaContainerGUID['QuickTime']
return MediaContainerGUID['Generic']
def create_ama_link(f, path, metadata):
tape_length = 4142016
basename = os.path.basename(path)
name, ext = os.path.splitext(basename)
path = os.path.abspath(path)
if ext.lower() == '.mxf':
m = mxf.MXFFile(path)
m.ama = True
m.dump()
return m.link(f)
edit_rate = guess_edit_rate(metadata)
length = guess_length(metadata, edit_rate)
container_guid, formats = get_container_guid(metadata)
master_mob = f.create.MasterMob()
src_mob = f.create.SourceMob()
tape_mob = f.create.SourceMob()
master_mob.name = basename
f.content.mobs.append(master_mob)
f.content.mobs.append(src_mob)
f.content.mobs.append(tape_mob)
tape_mob.descriptor = f.create.ImportDescriptor()
tape_mob.descriptor['MediaContainerGUID'].value = container_guid
tape_mob.descriptor['Locator'].append(create_network_locator(f, path))
t = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='timecode')
tc = f.create.Timecode(int(float(edit_rate)+0.5), drop=False)
tc.length = tape_length
tc.start = 0
t.segment.length = tape_length
t.segment.components.append(tc)
descriptors = []
for st in metadata['streams']:
codec_name = st.get('codec_name', None)
codec_type = st['codec_type']
if codec_type == 'video':
desc = create_video_descriptor(f, st)
desc['Locator'].append(create_network_locator(f, path))
desc['MediaContainerGUID'].value = container_guid
descriptors.append(desc)
# MC Quicktime plugin will error if theis is not set to something...
src_mob.comments['Video'] = codec_name
tape_slot = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='picture')
tape_slot.segment.length = tape_length
nul_ref = f.create.SourceClip(media_kind='picture')
nul_ref.length = tape_length
tape_slot.segment.components.append(nul_ref)
tape_clip = tape_mob.create_source_clip(tape_slot.slot_id)
tape_clip.length = length
tape_clip.media_kind = 'picture'
src_slot = src_mob.create_empty_sequence_slot(edit_rate, media_kind='picture')
src_slot.segment.length = length
src_slot.segment.components.append(tape_clip)
# src_slot = src_mob.create_empty_slot(edit_rate, media_kind='picture')
# src_slot.segment.length = length
clip = src_mob.create_source_clip(src_slot.slot_id)
clip.length = length
clip.media_kind = 'picture'
master_slot = master_mob.create_empty_sequence_slot(edit_rate, media_kind='picture')
master_slot.segment.components.append(clip)
master_slot.segment.length = length
elif codec_type == 'audio':
rate = st['sample_rate']
desc = create_audio_descriptor(f, st)
desc['Locator'].append(create_network_locator(f, path))
desc['MediaContainerGUID'].value = container_guid
descriptors.append(desc)
for i in range(st['channels']):
tape_slot = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
tape_slot.segment.length = tape_length
nul_ref = f.create.SourceClip(media_kind='sound')
nul_ref.length = tape_length
tape_slot.segment.components.append(nul_ref)
tape_clip = tape_mob.create_source_clip(tape_slot.slot_id)
tape_clip.length = length
tape_clip.media_kind = 'sound'
src_slot = src_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
src_slot.segment.length = length
src_slot.segment.components.append(tape_clip)
src_slot['PhysicalTrackNumber'].value = i+1
# src_slot = src_mob.create_empty_slot(edit_rate, media_kind='sound')
# src_slot.segment.length = length
clip = src_mob.create_source_clip(src_slot.slot_id)
clip.length = length
clip.media_kind = 'sound'
master_slot = master_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
master_slot.segment.components.append(clip)
master_slot.segment.length = length
master_slot['PhysicalTrackNumber'].value = i+1
if len(descriptors) > 1:
desc = f.create.MultipleDescriptor()
desc['Length'].value = 0
desc['SampleRate'].value = edit_rate
desc['MediaContainerGUID'].value = container_guid
desc['Locator'].append(create_network_locator(f, path))
desc['FileDescriptors'].value = descriptors
src_mob.descriptor = desc
else:
src_mob.descriptor = descriptors[0]
return master_mob, src_mob, tape_mob
def create_wav_descriptor(f, source_mob, path, stream_meta):
d = f.create.WAVEDescriptor()
rate = stream_meta['sample_rate']
d['SampleRate'].value = rate
d['Summary'].value = wave_infochunk(path)
d['Length'].value = stream_meta['duration_ts']
d['ContainerFormat'].value = source_mob.root.dictionary.lookup_containerdef("AAF")
d['Locator'].append( create_network_locator(f,path) )
return d
def create_wav_link(f, metadata):
"""
This will return three MOBs for the given `metadata`: master_mob, source_mob,
tape_mob
The parameter `metadata` is presumed to be a dictionary from a run of ffprobe.
It's not clear for the purposes of Pro Tools that a tape_mob needs to be made,
it'll open the AAF perfectly well without out one.
A lot of this recaps the AMA link code but it's subtly different enough, but it
could all bear to be refactored.
"""
path = metadata['format']['filename']
master_mob = f.create.MasterMob()
source_mob = f.create.SourceMob()
tape_mob = f.create.SourceMob()
edit_rate = metadata['streams'][0]['sample_rate']
length = metadata['streams'][0]['duration_ts']
master_mob.name = os.path.basename(path)
source_mob.name = os.path.basename(path) + " Source MOB"
tape_mob.name = os.path.basename(path) + " Tape MOB"
container_guid = AUID("3711d3cc-62d0-49d7-b0ae-c118101d1a16") # WAVE/AIFF
f.content.mobs.append(master_mob)
f.content.mobs.append(source_mob)
f.content.mobs.append(tape_mob)
tape_mob.descriptor = f.create.TapeDescriptor()
tape_mob.descriptor["VideoSignal"].value = "VideoSignalNull"
# Tape timecode
t = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='timecode')
tc = f.create.Timecode(int(float(edit_rate)+0.5), drop=False)
tc.length = int(length)
if 'tags' not in metadata['format'].keys() or \
'time_reference' not in metadata['format']['tags']:
tc.start = 0
else:
tc.start = metadata['format']['tags']['time_reference'] or 0
t.segment.length = int(length)
t.segment.components.append(tc)
descriptor = create_wav_descriptor(f, source_mob, path, metadata['streams'][0])
source_mob.descriptor = descriptor
for channel_index in range(metadata['streams'][0]['channels']):
tape_slot = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
tape_slot.segment.length = length
nul_ref = f.create.SourceClip(media_kind='sound')
nul_ref.length = length
tape_slot.segment.components.append(nul_ref)
tape_clip = tape_mob.create_source_clip(tape_slot.slot_id)
tape_clip.length = length
tape_clip.media_kind = 'sound'
src_slot = source_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
src_slot.segment.length = length
src_slot.segment.components.append(tape_clip)
src_slot['PhysicalTrackNumber'].value = channel_index + 1
clip = source_mob.create_source_clip(src_slot.slot_id)
clip.length = length
clip.media_kind = 'sound'
master_slot = master_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
master_slot.segment.components.append(clip)
master_slot.segment.length = length
master_slot['PhysicalTrackNumber'].value = channel_index + 1
return master_mob, source_mob, tape_mob
|
markreidvfx/pyaaf2
|
aaf2/ama.py
|
create_wav_link
|
python
|
def create_wav_link(f, metadata):
path = metadata['format']['filename']
master_mob = f.create.MasterMob()
source_mob = f.create.SourceMob()
tape_mob = f.create.SourceMob()
edit_rate = metadata['streams'][0]['sample_rate']
length = metadata['streams'][0]['duration_ts']
master_mob.name = os.path.basename(path)
source_mob.name = os.path.basename(path) + " Source MOB"
tape_mob.name = os.path.basename(path) + " Tape MOB"
container_guid = AUID("3711d3cc-62d0-49d7-b0ae-c118101d1a16") # WAVE/AIFF
f.content.mobs.append(master_mob)
f.content.mobs.append(source_mob)
f.content.mobs.append(tape_mob)
tape_mob.descriptor = f.create.TapeDescriptor()
tape_mob.descriptor["VideoSignal"].value = "VideoSignalNull"
# Tape timecode
t = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='timecode')
tc = f.create.Timecode(int(float(edit_rate)+0.5), drop=False)
tc.length = int(length)
if 'tags' not in metadata['format'].keys() or \
'time_reference' not in metadata['format']['tags']:
tc.start = 0
else:
tc.start = metadata['format']['tags']['time_reference'] or 0
t.segment.length = int(length)
t.segment.components.append(tc)
descriptor = create_wav_descriptor(f, source_mob, path, metadata['streams'][0])
source_mob.descriptor = descriptor
for channel_index in range(metadata['streams'][0]['channels']):
tape_slot = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
tape_slot.segment.length = length
nul_ref = f.create.SourceClip(media_kind='sound')
nul_ref.length = length
tape_slot.segment.components.append(nul_ref)
tape_clip = tape_mob.create_source_clip(tape_slot.slot_id)
tape_clip.length = length
tape_clip.media_kind = 'sound'
src_slot = source_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
src_slot.segment.length = length
src_slot.segment.components.append(tape_clip)
src_slot['PhysicalTrackNumber'].value = channel_index + 1
clip = source_mob.create_source_clip(src_slot.slot_id)
clip.length = length
clip.media_kind = 'sound'
master_slot = master_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
master_slot.segment.components.append(clip)
master_slot.segment.length = length
master_slot['PhysicalTrackNumber'].value = channel_index + 1
return master_mob, source_mob, tape_mob
|
This will return three MOBs for the given `metadata`: master_mob, source_mob,
tape_mob
The parameter `metadata` is presumed to be a dictionary from a run of ffprobe.
It's not clear for the purposes of Pro Tools that a tape_mob needs to be made,
it'll open the AAF perfectly well without out one.
A lot of this recaps the AMA link code but it's subtly different enough, but it
could all bear to be refactored.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/ama.py#L365-L441
|
[
"def create_wav_descriptor(f, source_mob, path, stream_meta):\n d = f.create.WAVEDescriptor()\n rate = stream_meta['sample_rate']\n d['SampleRate'].value = rate\n d['Summary'].value = wave_infochunk(path)\n d['Length'].value = stream_meta['duration_ts']\n d['ContainerFormat'].value = source_mob.root.dictionary.lookup_containerdef(\"AAF\")\n d['Locator'].append( create_network_locator(f,path) )\n return d\n"
] |
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
import os
import sys
from .rational import AAFRational
from . import video
from . import audio
from . import mxf
from .auid import AUID
import struct
MediaContainerGUID = {
"Generic" : (AUID("b22697a2-3442-44e8-bb8f-7a1cd290ebf1"),
('.3g2', '.3gp', '.aac', '.au', '.avi', '.bmp', '.dv', '.gif',
'.jfif', '.jpeg', '.jpg', '.m4a', '.mid', '.moov', '.mov',
'.movie', '.mp2', '.mp3', '.mp4', '.mpa', '.mpe', '.mpeg',
'.mpg', '.png', '.psd', '.qt', '.tif', '.tiff',)),
"AVCHD" : (AUID("f37d624b307d4ef59bebc539046cad54"),
('.mts', '.m2ts',)),
"ImageSequencer" : (AUID("4964178d-b3d5-485f-8e98-beb89d92a5f4"),
('.dpx',)),
"CanonRaw" : (AUID("0f299461-ee19-459f-8ae6-93e65c76a892"),
('.rmf',)),
"WaveAiff" : (AUID("3711d3cc-62d0-49d7-b0ae-c118101d1a16"),
('.wav', '.wave', '.bwf', '.aif', '.aiff', '.aifc', '.cdda',)),
"MXF" : (AUID("60eb8921-2a02-4406-891c-d9b6a6ae0645"),
('.mxf',)),
"QuickTime" : (AUID("781f84b7-b989-4534-8a07-c595cb9a6fb8"),
('.mov', '.mp4', '.m4v', '.mpg', '.mpe', '.mpeg', '.3gp', '.3g2',
'.qt', '.moov', '.movie', '.avi', '.mp2', '.mp3', '.m4a', '.wav',
'.aiff', '.aif', '.au', '.aac', '.mid', '.mpa', '.gif', '.jpg',
'.jpeg', '.jfif', '.tif', '.tiff', '.png', '.bmp', '.psd', '.dv')),
}
def pixel_sizes(pix_fmt):
h_samp = 2
v_samp = 2
depth = 8
if pix_fmt.count('420'):
h_samp = 2
v_samp = 2
elif pix_fmt.count('422'):
h_samp = 2
v_samp = 1
elif pix_fmt.count('444'):
h_samp = 1
v_samp = 1
for i in [8, 10, 12, 16]:
if pix_fmt.count("p%d" % i):
depth = i
break
return (depth, h_samp, v_samp)
def get_avc_compression(meta):
profile = meta.get('profile', None)
key = 'CompressedPicture'
if profile == "Baseline":
key = 'AVCBaselineUnconstrained'
elif profile == "Constrained Baseline":
key = 'AVCConstrainedBaselineUnconstrained'
elif profile == "Main":
key = 'AVCMainUnconstrained'
elif profile == "Extended":
key ='AVCExtendedUnconstrained'
elif profile == "High":
key = 'AVCHighUnconstrained'
elif profile == "High 10":
key = 'AVCHigh10Unconstrained'
elif profile == "High 10 Intra":
key = 'AVCHigh10IntraUnconstrained'
elif profile == "High 4:2:2":
key = 'AVCHigh422Unconstrained'
elif profile == "High 4:2:2 Intra":
key = 'AVCHigh422IntraUnconstrained'
elif profile == "High 4:4:4":
# key = 'AVCHigh444IntraUnconstrained'
key = 'CompressedPicture'
elif profile == "High 4:4:4 Predictive":
# key = 'AVCHigh444PredictiveUnconstrained'
key = 'CompressedPicture'
elif profile == "High 4:4:4 Intra":
# key = 'AVCHigh444IntraUnconstrained'
key = 'CompressedPicture'
elif profile == 'CAVLC 4:4:4':
# key = 'AVCCAVLC444IntraUnconstrained'
key = 'CompressedPicture'
return video.compression_ids[key]
def get_compression(meta):
codec_name = meta.get('codec_name', None)
if codec_name == 'mjpeg':
return video.compression_ids['mjpeg']
if codec_name == 'h264':
return get_avc_compression(meta)
return video.compression_ids['CompressedPicture']
def create_video_descriptor(f, meta):
d = f.create.CDCIDescriptor()
depth, h_samp, v_samp = pixel_sizes(meta['pix_fmt'])
width = meta['width']
height = meta['height']
# aspect_ratio = meta.get('display_aspect_ratio', "").replace(":", '/')
aspect_ratio = "%d/%d" % (width, height)
d['ComponentWidth'].value = depth
d['HorizontalSubsampling'].value = h_samp
d['VerticalSubsampling'].value = v_samp
d['FrameLayout'].value = 'FullFrame'
d['VideoLineMap'].value = [0,0]
# d['VideoLineMap'].value = [42, 0]
d['ImageAspectRatio'].value = aspect_ratio
d['StoredWidth'].value = width
d['StoredHeight'].value = height
d['SampleRate'].value = meta['avg_frame_rate']
compression = get_compression(meta)
d['Compression'].value = compression
# d['ResolutionID'].value = 2900
# d['Compression'].value = AUID('04010202-0000-0000-060e-2b3404010101')
d['Length'].value = int(meta['nb_frames'])
return d
def create_audio_descriptor(f, meta):
d = f.create.PCMDescriptor()
rate = meta['sample_rate']
d['SampleRate'].value = rate
d['AudioSamplingRate'].value = rate
d['Channels'].value = meta['channels']
d['AverageBPS'].value = int(meta['bit_rate'])
bit_depth, block_align = audio.audio_format_sizes.get(meta['sample_fmt'], (0,0))
d['QuantizationBits'].value = bit_depth
d['BlockAlign'].value = block_align
duration = float(meta['duration'])
d['Length'].value = int(duration * float(rate))
d['Compression'].value = AUID('04020202-0000-0000-060e-2b3404010101')
return d
def create_network_locator(f, absolute_path):
n = f.create.NetworkLocator()
if sys.version_info[0] < 3:
import urllib
n['URLString'].value = 'file://' + urllib.pathname2url(absolute_path)
else:
import pathlib
n['URLString'].value = pathlib.Path(absolute_path).as_uri()
return n
def guess_edit_rate(metadata):
for st in metadata['streams']:
codec_type = st['codec_type']
if codec_type == 'video':
return AAFRational(st['avg_frame_rate'])
elif codec_type == 'audio':
return AAFRational(st['sample_rate'])
def guess_length(metadata, edit_rate):
for st in metadata['streams']:
codec_type = st['codec_type']
if codec_type == 'video':
return int(st['nb_frames'])
def get_container_guid(metadata):
for st in metadata['streams']:
codec_name = st['codec_name']
if codec_name in ('prores', ):
return MediaContainerGUID['QuickTime']
return MediaContainerGUID['Generic']
def create_ama_link(f, path, metadata):
tape_length = 4142016
basename = os.path.basename(path)
name, ext = os.path.splitext(basename)
path = os.path.abspath(path)
if ext.lower() == '.mxf':
m = mxf.MXFFile(path)
m.ama = True
m.dump()
return m.link(f)
edit_rate = guess_edit_rate(metadata)
length = guess_length(metadata, edit_rate)
container_guid, formats = get_container_guid(metadata)
master_mob = f.create.MasterMob()
src_mob = f.create.SourceMob()
tape_mob = f.create.SourceMob()
master_mob.name = basename
f.content.mobs.append(master_mob)
f.content.mobs.append(src_mob)
f.content.mobs.append(tape_mob)
tape_mob.descriptor = f.create.ImportDescriptor()
tape_mob.descriptor['MediaContainerGUID'].value = container_guid
tape_mob.descriptor['Locator'].append(create_network_locator(f, path))
t = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='timecode')
tc = f.create.Timecode(int(float(edit_rate)+0.5), drop=False)
tc.length = tape_length
tc.start = 0
t.segment.length = tape_length
t.segment.components.append(tc)
descriptors = []
for st in metadata['streams']:
codec_name = st.get('codec_name', None)
codec_type = st['codec_type']
if codec_type == 'video':
desc = create_video_descriptor(f, st)
desc['Locator'].append(create_network_locator(f, path))
desc['MediaContainerGUID'].value = container_guid
descriptors.append(desc)
# MC Quicktime plugin will error if theis is not set to something...
src_mob.comments['Video'] = codec_name
tape_slot = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='picture')
tape_slot.segment.length = tape_length
nul_ref = f.create.SourceClip(media_kind='picture')
nul_ref.length = tape_length
tape_slot.segment.components.append(nul_ref)
tape_clip = tape_mob.create_source_clip(tape_slot.slot_id)
tape_clip.length = length
tape_clip.media_kind = 'picture'
src_slot = src_mob.create_empty_sequence_slot(edit_rate, media_kind='picture')
src_slot.segment.length = length
src_slot.segment.components.append(tape_clip)
# src_slot = src_mob.create_empty_slot(edit_rate, media_kind='picture')
# src_slot.segment.length = length
clip = src_mob.create_source_clip(src_slot.slot_id)
clip.length = length
clip.media_kind = 'picture'
master_slot = master_mob.create_empty_sequence_slot(edit_rate, media_kind='picture')
master_slot.segment.components.append(clip)
master_slot.segment.length = length
elif codec_type == 'audio':
rate = st['sample_rate']
desc = create_audio_descriptor(f, st)
desc['Locator'].append(create_network_locator(f, path))
desc['MediaContainerGUID'].value = container_guid
descriptors.append(desc)
for i in range(st['channels']):
tape_slot = tape_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
tape_slot.segment.length = tape_length
nul_ref = f.create.SourceClip(media_kind='sound')
nul_ref.length = tape_length
tape_slot.segment.components.append(nul_ref)
tape_clip = tape_mob.create_source_clip(tape_slot.slot_id)
tape_clip.length = length
tape_clip.media_kind = 'sound'
src_slot = src_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
src_slot.segment.length = length
src_slot.segment.components.append(tape_clip)
src_slot['PhysicalTrackNumber'].value = i+1
# src_slot = src_mob.create_empty_slot(edit_rate, media_kind='sound')
# src_slot.segment.length = length
clip = src_mob.create_source_clip(src_slot.slot_id)
clip.length = length
clip.media_kind = 'sound'
master_slot = master_mob.create_empty_sequence_slot(edit_rate, media_kind='sound')
master_slot.segment.components.append(clip)
master_slot.segment.length = length
master_slot['PhysicalTrackNumber'].value = i+1
if len(descriptors) > 1:
desc = f.create.MultipleDescriptor()
desc['Length'].value = 0
desc['SampleRate'].value = edit_rate
desc['MediaContainerGUID'].value = container_guid
desc['Locator'].append(create_network_locator(f, path))
desc['FileDescriptors'].value = descriptors
src_mob.descriptor = desc
else:
src_mob.descriptor = descriptors[0]
return master_mob, src_mob, tape_mob
def wave_infochunk(path):
"""
Returns a bytearray of the WAVE RIFF header and fmt
chunk for a `WAVEDescriptor` `Summary`
"""
with open(path,'rb') as file:
if file.read(4) != b"RIFF":
return None
data_size = file.read(4) # container size
if file.read(4) != b"WAVE":
return None
while True:
chunkid = file.read(4)
sizebuf = file.read(4)
if len(sizebuf) < 4 or len(chunkid) < 4:
return None
size = struct.unpack(b'<L', sizebuf )[0]
if chunkid[0:3] != b"fmt":
if size % 2 == 1:
seek = size + 1
else:
seek = size
file.seek(size,1)
else:
return bytearray(b"RIFF" + data_size + b"WAVE" + chunkid + sizebuf + file.read(size))
def create_wav_descriptor(f, source_mob, path, stream_meta):
d = f.create.WAVEDescriptor()
rate = stream_meta['sample_rate']
d['SampleRate'].value = rate
d['Summary'].value = wave_infochunk(path)
d['Length'].value = stream_meta['duration_ts']
d['ContainerFormat'].value = source_mob.root.dictionary.lookup_containerdef("AAF")
d['Locator'].append( create_network_locator(f,path) )
return d
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
DirEntry.pop
|
python
|
def pop(self):
entry = self
parent = self.parent
root = parent.child()
dir_per_sector = self.storage.sector_size // 128
max_dirs_entries = self.storage.dir_sector_count * dir_per_sector
count = 0
if root.dir_id == entry.dir_id:
parent.child_id = None
else:
# find dir entry pointing to self
while True:
if count > max_dirs_entries:
raise CompoundFileBinaryError("max dir entries limit reached")
if entry < root:
if root.left_id == entry.dir_id:
root.left_id = None
break
root = root.left()
else:
if root.right_id == entry.dir_id:
# root right is pointing to self
root.right_id = None
break
root = root.right()
count += 1
left = entry.left()
right = entry.right()
# clear from cache
if parent.dir_id in self.storage.children_cache:
del self.storage.children_cache[parent.dir_id][entry.name]
if left:
del self.storage.children_cache[parent.dir_id][left.name]
if right:
del self.storage.children_cache[parent.dir_id][right.name]
if left is not None:
parent.add_child(left)
if right is not None:
parent.add_child(right)
# clear parent and left and right
self.left_id = None
self.right_id = None
self.parent = None
|
remove self from binary search tree
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L609-L664
|
[
"def left(self):\n return self.storage.read_dir_entry(self.left_id, self.parent)\n",
"def right(self):\n return self.storage.read_dir_entry(self.right_id, self.parent)\n"
] |
class DirEntry(object):
__slots__ = ('storage', 'dir_id', 'parent', 'data', '__weakref__')
def __init__(self, storage, dir_id, data=None):
self.storage = storage
self.parent = None
if data is None:
self.data = bytearray(128)
# setting dir_id to None disable mark_modified
self.dir_id = None
self.left_id = None
self.right_id = None
self.child_id = None
self.sector_id = None
else:
self.data = data
# mark modified will now work
self.dir_id = dir_id
@property
def name(self):
name_size = unpack_u16le_from(self.data, 64)
assert name_size <= 64
name = decode_utf16le(self.data[:name_size])
return name
@name.setter
def name(self, value):
name_data = value.encode("utf-16le")
name_size = len(name_data)
assert name_size <= 64
self.data[:name_size] = name_data
pad = 64 - name_size
for i in range(pad):
self.data[name_size +i] = 0
# includes null terminator? should re-verify this
struct.pack_into(str('<H'), self.data, 64, min(name_size+2, 64))
self.mark_modified()
@property
def type(self):
return dir_types.get(self.data[66] , "unknown")
@type.setter
def type(self, value):
t = None
for k,v in dir_types.items():
if v == value:
t = k
break
if t is None:
raise ValueError("invalid dir type: %s" % str(value))
self.data[66] = t
self.mark_modified()
@property
def color(self):
if self.data[67] == 0x01:
return 'black'
return 'red'
@color.setter
def color(self, value):
if value == 'black':
self.data[67] = 0x01
elif value == 'red':
self.data[67] = 0x00
else:
raise ValueError("invalid dir type: %s" % str(value))
self.mark_modified()
@property
def left_id(self):
sid = unpack_u32le_from(self.data, 68)
return decode_sid(sid)
@left_id.setter
def left_id(self, value):
struct.pack_into(str('<I'), self.data, 68, encode_sid(value))
self.mark_modified()
@property
def right_id(self):
sid = unpack_u32le_from(self.data, 72)
# sid = struct.unpack_from(str('<I'), bytes(self.data), 72)[0]
return decode_sid(sid)
@right_id.setter
def right_id(self, value):
struct.pack_into(str('<I'), self.data, 72, encode_sid(value))
self.mark_modified()
@property
def child_id(self):
sid = unpack_u32le_from(self.data, 76)
return decode_sid(sid)
@child_id.setter
def child_id(self, value):
struct.pack_into(str('<I'), self.data, 76, encode_sid(value))
self.mark_modified()
@property
def class_id(self):
value = auid.AUID(bytes_le=self.data[80:96])
if value.int == 0:
return None
return value
@class_id.setter
def class_id(self, value):
if value is None:
self.data[80:96] = bytearray(16)
else:
self.data[80:96] = value.bytes_le
self.mark_modified()
@property
def flags(self):
flags = unpack_u32le_from(self.data, 96)
return flags
@flags.setter
def flags(self, value):
struct.pack_into(str('<I'), self.data, 96, value)
self.mark_modified()
@property
def create_time(self):
value = unpack_u64le_from(self.data, 100)
return value
@create_time.setter
def create_time(self, value):
struct.pack_into(str('<Q'), self.data, 100, value)
self.mark_modified()
@property
def modify_time(self):
value = unpack_u64le_from(self.data, 108)
return value
@modify_time.setter
def modify_time(self, value):
struct.pack_into(str('<Q'), bytes(self.data), 108, value)
self.mark_modified()
@property
def sector_id(self):
sid = unpack_u32le_from(self.data, 116)
return decode_sid(sid)
@sector_id.setter
def sector_id(self, value):
struct.pack_into(str('<I'), self.data, 116, encode_sid(value))
self.mark_modified()
@property
def byte_size(self):
value = unpack_u64le_from(self.data, 120)
return value
@byte_size.setter
def byte_size(self, value):
struct.pack_into(str('<Q'), self.data, 120, value)
self.mark_modified()
def mark_modified(self):
if self.storage.mode in ('r', 'rb'):
return
if self.dir_id is None:
return
self.storage.modified[self.dir_id] = self
if len(self.storage.modified) > 128:
self.storage.write_modified_dir_entries()
def __lt__(self, other):
if isinstance(other, DirEntry):
other = other.name
if len(self.name) == len(other):
# compare not case senstive
return self.name.upper() < other.upper()
else:
# shorter names are always less then
return len(self.name) < len(other)
def __le__(self, other):
if self == other:
return True
return self < other
def __gt__(self, other):
return other < self
def __ge__(self, other):
if self == other:
return True
return self > other
def __eq__(self, other):
if other is None:
return False
if isinstance(other, DirEntry):
other = other.name
if len(self.name) == len(other):
return self.name.upper() == other.upper()
return False
def left(self):
return self.storage.read_dir_entry(self.left_id, self.parent)
def right(self):
return self.storage.read_dir_entry(self.right_id, self.parent)
def child(self):
return self.storage.read_dir_entry(self.child_id, self)
def add_child(self, entry):
entry.parent = self
entry.color = 'black'
child = self.child()
if child:
child.insert(entry)
else:
self.child_id = entry.dir_id
if self.dir_id in self.storage.children_cache:
self.storage.children_cache[self.dir_id][entry.name] = entry
def insert(self, entry):
root = self
dir_per_sector = self.storage.sector_size // 128
max_dirs_entries = self.storage.dir_sector_count * dir_per_sector
count = 0
entry.color = 'black'
# avoids recursion
while True:
if count > max_dirs_entries:
raise CompoundFileBinaryError("max dir entries limit reached")
if entry < root:
left = root.left()
if left:
root = left
else:
root.left_id = entry.dir_id
entry.parent = self.parent
break
else:
right = root.right()
if right:
root = right
else:
root.right_id = entry.dir_id
entry.parent = self.parent
break
count += 1
# resucive version
# if entry < self:
# left = self.left()
# if left:
# left.insert(entry)
# else:
# self.left_id = entry.dir_id
# else:
# right = self.right()
# if right:
# right.insert(entry)
# else:
# self.right_id = entry.dir_id
def path(self):
path = []
parent = self
while parent:
name = parent.name
if name == "Root Entry":
break
path.append(parent.name)
parent= parent.parent
return '/' + '/'.join(reversed(path))
def open(self, mode='r'):
if self.type != 'stream':
raise TypeError("can only open streams")
return self.storage.open(self, mode)
def isdir(self):
return self.type in ('storage', 'root storage')
def isroot(self):
return self.type == 'root storage'
def listdir(self):
return self.storage.listdir(self)
def makedir(self, relative_path, class_id = None):
if not self.isdir():
raise TypeError("can only add a DirEntry to a storage type")
sep = '/'
if self.isroot():
sep = ''
path = self.path() + sep + relative_path
return self.storage.makedir(path, class_id)
def isfile(self):
return self.type == 'stream'
def get(self, name, default=None):
dir_dict = self.storage.listdir_dict(self)
return dir_dict.get(name, default)
def touch(self, name):
item = self.get(name, None)
if item:
return item
sep = '/'
if self.isroot():
sep = ''
path = self.path() + sep + name
return self.storage.create_dir_entry(path, 'stream', None)
def write(self):
f = self.storage.f
f.seek(self.storage.dir_entry_pos(self.dir_id))
f.write(self.data)
def read(self):
f = self.storage.f
f.seek(self.storage.dir_entry_pos(self.dir_id))
f.readinto(self.data)
def __repr__(self):
return self.name
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.remove
|
python
|
def remove(self, path):
entry = self.find(path)
if not entry:
raise ValueError("%s does not exists" % path)
if entry.type == 'root storage':
raise ValueError("can no remove root entry")
if entry.type == "storage" and not entry.child_id is None:
raise ValueError("storage contains children")
entry.pop()
# remove stream data
if entry.type == "stream":
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
self.free_dir_entry(entry)
|
Removes both streams and storage DirEntry types from file.
storage type entries need to be empty dirs.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1606-L1629
|
[
"def free_fat_chain(self, start_sid, minifat=False):\n fat =self.fat\n if minifat:\n fat = self.minifat\n\n for sid in self.get_fat_chain(start_sid, minifat):\n fat[sid] = FREESECT\n if minifat:\n self.minifat_freelist.insert(0, sid)\n else:\n self.fat_freelist.insert(0, sid)\n",
"def free_dir_entry(self, entry):\n\n # add freelist\n self.dir_freelist.append(entry.dir_id)\n\n # remove from dir caches\n if entry.dir_id in self.dir_cache:\n del self.dir_cache[entry.dir_id]\n\n if entry.dir_id in self.children_cache:\n del self.children_cache[entry.dir_id]\n\n if entry.dir_id in self.modified:\n del self.modified[entry.dir_id]\n\n entry.dir_id = None\n",
"def find(self, path):\n \"\"\"\n find a ``DirEntry`` located at *path*. Returns ``None`` if path\n does not exist.\n \"\"\"\n\n if isinstance(path, DirEntry):\n return path\n\n if path == \"/\":\n return self.root\n\n split_path = path.lstrip('/').split(\"/\")\n\n i = 0\n root = self.root\n\n while True:\n\n children = self.listdir_dict(root)\n match = children.get(split_path[i], None)\n\n if match:\n if i == len(split_path) - 1:\n return match\n root = match\n i += 1\n else:\n return None\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
if sector_size == 4096:
self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
elif sector_size == 512:
self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
else:
raise ValueError("sector size must be 4096 or 512")
self.major_version = 4
self.minor_version = 62
self.byte_order = "le"
self.sector_size = sector_size
self.mini_stream_sector_size = 64
self.dir_sector_count = 1
self.fat_sector_count = 1
self.dir_sector_start = 0
self.transaction_signature = 1
self.min_stream_max_size = 4096
self.minifat_sector_start = FREESECT
self.minifat_sector_count = 0
self.difat_sector_start = FREESECT
self.difat_sector_count = 0
self.difat = [[]]
for i in range(109):
self.difat[0].append(FREESECT)
self.difat[0][0] = 1
for i in range(self.sector_size // 4):
self.fat.append(FREESECT)
if i > 1:
self.fat_freelist.append(i)
self.fat[0] = ENDOFCHAIN # end of dir chain
self.fat[self.difat[0][0]] = FATSECT
self.root = DirEntry(self, 0)
self.root.name = 'Root Entry'
self.root.sector_id = None
self.root.type = 'root storage'
self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
self.dir_cache[0] = self.root
self.dir_fat_chain = [0]
# raise NotImplementedError("mode: %s supported not implemented" % self.f.mode)
def write_header(self):
logging.debug("writiing header")
f = self.f
f.seek(0)
f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
f.write(self.class_id.bytes_le)
write_u16le(f, self.minor_version)
write_u16le(f, self.major_version)
write_u16le(f, 0xFFFE) #byte order le
write_u16le(f, int(math.log(self.sector_size, 2)))
write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
f.write(b'\0' * 6) #skip reseverd
write_u32le(f, self.dir_sector_count)
write_u32le(f, self.fat_sector_count)
write_u32le(f, self.dir_sector_start)
write_u32le(f, self.transaction_signature)
write_u32le(f, self.min_stream_max_size)
write_u32le(f, self.minifat_sector_start)
write_u32le(f, self.minifat_sector_count)
write_u32le(f, self.difat_sector_start)
write_u32le(f, self.difat_sector_count)
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
def read_header(self):
f = self.f
f.seek(0)
magic = f.read(8)
# logging.debug("magic: %s" % magic.encode("hex"))
logging.debug("magic: %s" % str([magic]))
# clsid = f.read(16)
# logging.debug("clsid: %s" % clsid.encode("hex"))
self.class_id = auid.AUID(bytes_le=f.read(16))
logging.debug("clsid: %s" % str(self.class_id))
self.minor_version = read_u16le(f)
logging.debug("minor_version: %d" % self.minor_version)
self.major_version = read_u16le(f)
logging.debug("major_version: %d" % self.major_version)
byte_order = read_u16le(f)
if byte_order == 0xFFFE:
self.byte_order = 'le'
else:
raise NotImplementedError("endian format:0x%X not supported" % byte_order)
logging.debug("byte_order: %s" % self.byte_order)
size = read_u16le(f)
self.sector_size = pow(2, size)
logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
size = read_u16le(f)
self.mini_stream_sector_size = pow(2, size)
logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
if not self.sector_size in (4096, 512):
raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
if self.mini_stream_sector_size != 64:
raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
f.read(6) #skip reseverd
self.dir_sector_count = read_u32le(f)
logging.debug("dir_sector_count: %d" % self.dir_sector_count)
self.fat_sector_count = read_u32le(f)
logging.debug("fat_sector_count: %d" % self.fat_sector_count)
self.dir_sector_start = read_u32le(f)
logging.debug("dir_sector_start: %d" % self.dir_sector_start)
self.transaction_signature = read_u32le(f)
logging.debug("transaction_signature: %d" % self.transaction_signature)
self.min_stream_max_size = read_u32le(f)
logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
self.minifat_sector_start = read_u32le(f)
logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
self.minifat_sector_count = read_u32le(f)
logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
self.difat_sector_start = read_u32le(f)
logging.debug("difat_sector_start: %d" % self.difat_sector_start)
self.difat_sector_count = read_u32le(f)
logging.debug("difat_sector_count: %d" % self.difat_sector_count)
self.difat = [[]]
logging.debug("reading header difat at %d" % f.tell())
for i in range(109):
item = read_u32le(f)
# item = fat_sector_types.get(item, item)
self.difat[0].append(item)
sectors_left = self.difat_sector_count
sid = self.difat_sector_start
# reading difat sectors
while sectors_left:
logging.debug("reading difat sid: %d", sid)
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
break
self.difat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
difat = []
for i in range( (self.sector_size // 4)):
item = read_u32le(f)
difat.append(item)
self.difat.append(difat)
sid = difat[-1]
logging.debug("next difat: %d" % sid)
sectors_left -= 1
def iter_difat(self):
for i, sid in enumerate(self.difat[0]):
yield 0, i, sid
t = 1
for item in self.difat[1:]:
for i, sid in enumerate(item[:-1]):
yield t, i, sid
t+=1
def write_difat(self):
f = self.f
# write header entries
f.seek(76)
logging.debug("writing header difat")
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
if self.difat_sector_count == 0:
return
sid = self.difat_sector_start
assert len(self.difat[1:]) == self.difat_sector_count
for table in self.difat[1:]:
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
raise IOError("bad difat sector type")
pos = (sid + 1) * self.sector_size
logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
f.seek(pos)
for i in range(self.sector_size // 4):
write_u32le(f, table[i])
sid = table[-1]
def read_fat(self):
f = self.f
self.fat = array(str('I'))
sector_count = 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# len(fat_sectors),self.fat_sector_count
# assert len(fat_sectors) == self.fat_sector_count
if len(fat_sectors) != self.fat_sector_count:
logging.warn("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
self.fat_sector_count = len(fat_sectors)
for sid in fat_sectors:
pos = (sid + 1) * self.sector_size
f.seek(pos)
extend_sid_table(f, self.fat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.fat.byteswap()
for i,v in enumerate(self.fat):
if v == FREESECT:
self.fat_freelist.append(i)
logging.debug("read %d fat sectors ", sector_count)
if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
logging.warn("range lock sector has data")
# logging.debug("fat: %s" % str(pretty_sectors(self.fat)))
def write_fat(self):
logging.debug("writing fat")
f = self.f
sector_count = 0
assert len(self.fat)*4 % self.sector_size == 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# check that the difat has enough entries to hold the current fat
assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(fat_sectors):
# logging.debug("writing fat to sid: %d" % sid)
f.seek((sid + 1) * self.sector_size)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
f = self.f
sector_count = 0
self.minifat = array(str('I'))
for sid in self.get_fat_chain(self.minifat_sector_start):
self.minifat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
extend_sid_table(f, self.minifat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.minifat.byteswap()
# mini_stream_byte_size = 0
last_used_sector = 0
for i,v in enumerate(self.minifat):
if v == FREESECT:
self.minifat_freelist.append(i)
else:
last_used_sector = i
# mini_stream_byte_size += self.mini_stream_sector_size
mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
# for i, sect in enumerate(pretty_sectors(self.minifat)):
# print(i, sect)
logging.debug("read %d mini fat sectors", sector_count)
return mini_stream_byte_size
def write_minifat(self):
f = self.f
sector_count = 0
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
pos = (sid + 1) * self.sector_size
f.seek(pos)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.minifat[start:end]))
def write_modified_dir_entries(self):
f = self.f
for dir_id in sorted(self.modified):
entry = self.modified[dir_id]
stream_pos = entry.dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(entry.data)
# invalidate sector
if sid in self.sector_cache:
del self.sector_cache[sid]
self.modified = {}
def write_dir_entries(self):
self.write_modified_dir_entries()
# clear empty DirEntrys
empty_dir = bytearray(128)
f = self.f
self.dir_freelist.sort()
for dir_id in self.dir_freelist:
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(empty_dir)
def next_free_minifat_sect(self):
idx_per_sect = self.sector_size // self.mini_stream_sector_size
stream_sects = len(self.mini_stream_chain) * idx_per_sect
if self.minifat_freelist:
i = self.minifat_freelist.pop(0)
assert self.minifat[i] == FREESECT
if i+1 > stream_sects:
self.mini_stream_grow()
return i
# if we got here need to add aditional fat
sid = self.next_free_sect()
# logging.warn("growing minifat to sid %d" % sid)
idx_start = len(self.minifat)
idx_end = idx_start + self.sector_size // 4
self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
if self.minifat_sector_count == 0:
self.minifat_sector_count = 1
self.minifat_sector_start = sid
else:
self.minifat_sector_count += 1
self.fat[self.minifat_chain[-1]] = sid
self.minifat_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
return self.next_free_minifat_sect()
def next_free_sect(self):
    """Return the sid of the next free (FREESECT) sector in the FAT.

    Pops from ``self.fat_freelist`` when possible; otherwise grows the
    FAT by one sector, registering the new FAT sector in the DIFAT and
    growing the DIFAT itself with an extra sector when all of its slots
    are taken.

    For 4096-byte sectors, the "range lock" sector (the sector covering
    file offsets 0x7FFFFF00-0x7FFFFFFF) must never be handed out; it is
    marked ENDOFCHAIN instead.
    """
    if self.fat_freelist:
        i = self.fat_freelist.pop(0)
        assert self.fat[i] == FREESECT
        # Handle Range Lock Sector
        if i == RANGELOCKSECT and self.sector_size == 4096:
            self.fat[i] = ENDOFCHAIN
            logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
            return self.next_free_sect()
        return i

    # FAT is full: add an additional FAT sector, finding (or creating)
    # a free DIFAT slot that will point at it.
    difat_table = None
    difat_index = None
    for t, i, v in self.iter_difat():
        if v == FREESECT:
            difat_table = t
            difat_index = i
            break

    new_difat_sect = None
    if difat_index is None:
        # No free DIFAT slot left: chain on a brand new DIFAT sector.
        new_difat_sect = len(self.fat) + 1
        logging.debug("adding new difat to sid: %d" % new_difat_sect)
        if self.difat_sector_count == 0:
            self.difat_sector_start = new_difat_sect
            self.difat_sector_count = 1
        else:
            self.difat[-1][-1] = new_difat_sect
            self.difat_sector_count += 1

        # New DIFAT table: every slot free except the last one, which is
        # the next-sector link and must be terminated.
        difat = []
        for i in range(self.sector_size // 4):
            difat.append(FREESECT)
        # BUGFIX: was `difat[-1] == ENDOFCHAIN` (a no-op comparison); the
        # chain terminator was never actually written.
        difat[-1] = ENDOFCHAIN
        self.difat.append(difat)

        # re-scan: the freshly added table now has free slots
        for t, i, v in self.iter_difat():
            if v == FREESECT:
                difat_table = t
                difat_index = i
                break

    new_fat_sect = len(self.fat)
    self.difat[difat_table][difat_index] = new_fat_sect

    # grow fat entries
    idx_start = len(self.fat)
    idx_end = idx_start + (self.sector_size // 4)
    self.fat.extend([FREESECT for i in range(self.sector_size // 4)])

    non_free_sids = set([new_fat_sect, new_difat_sect])

    # The range lock sector covers file offsets 0x7FFFFF00-0x7FFFFFFF
    # and must never be allocated for data.
    if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
        non_free_sids.add(RANGELOCKSECT)
        logging.debug("adding range lock")
        self.fat[RANGELOCKSECT] = ENDOFCHAIN

    freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
    self.fat_freelist.extend(freelist)

    self.fat[new_fat_sect] = FATSECT
    self.fat_sector_count += 1
    if not new_difat_sect is None:
        self.fat[new_difat_sect] = DIFSECT

    return self.next_free_sect()
def read_sector_data(self, sid):
sector_data = self.sector_cache.get(sid, None)
if sector_data is not None:
return sector_data
else:
pos = (sid + 1) * self.sector_size
self.f.seek(pos)
sector_data = bytearray(self.sector_size)
#NOTE: if requested sector doesn't exsit or
# is truncated will padd with zeros, expected behavour
bytes_read = self.f.readinto(sector_data)
self.sector_cache[sid] = sector_data
return sector_data
def get_sid_offset(self, abs_pos):
sid, sid_offset = divmod(abs_pos, self.sector_size)
return sid-1, sid_offset
def dir_entry_sid_offset(self, dir_id):
stream_pos = dir_id * 128
chain_index, sid_offset = divmod(stream_pos, self.sector_size)
sid = self.dir_fat_chain[chain_index]
return sid, sid_offset
def dir_entry_pos(self, dir_id):
sid, sid_offset = self.dir_entry_sid_offset(dir_id)
pos = ((sid + 1) * self.sector_size) + sid_offset
return pos
def read_dir_entry(self, dir_id, parent = None):
if dir_id is None:
return None
entry = self.dir_cache.get(dir_id, None)
if entry is not None:
return entry
# assert not dir_id in self.dir_freelist
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
sector_data = self.read_sector_data(sid)
data= bytearray(sector_data[sid_offset:sid_offset+128])
entry = DirEntry(self, dir_id, data=data)
entry.parent = parent
self.dir_cache[dir_id] = entry
return entry
def clear_sector(self, sid):
sector_pos = (sid + 1) * self.sector_size
self.f.seek(sector_pos)
# for i in range(self.sector_size):
self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
# use free list first
if self.dir_freelist:
return self.dir_freelist.pop(0)
f = self.f
sect = self.fat_chain_append(self.dir_fat_chain[-1])
self.dir_fat_chain.append(sect)
self.dir_sector_count += 1
first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
last_dir_id = first_dir_id + (self.sector_size // 128)
self.dir_freelist.extend(range(first_dir_id, last_dir_id))
return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
    """Follow a (mini)FAT chain starting at *start_sid* and return the
    list of sector ids in order.

    :param start_sid: first sector id of the chain; a special/sentinel
        value (``None``, ``ENDOFCHAIN``, ``FREESECT``, ``DIFSECT``,
        ``FATSECT``) means "no chain" and yields ``[]``.
    :param minifat: follow ``self.minifat`` instead of ``self.fat``.
    :raises CompoundFileBinaryError: if the chain contains a cycle.
    """
    fat = self.fat
    fat_name = "FAT"
    if minifat:
        fat = self.minifat
        fat_name = "MINIFAT"

    # Floyd's Tortoise and Hare cycle-finding algorithm:
    # b (tortoise) advances one link per iteration and is recorded,
    # a (hare) advances up to two links per iteration.
    a = start_sid
    b = start_sid
    sectors = []
    if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
        return []
    while b != ENDOFCHAIN:
        sectors.append(b)
        b = fat[b]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a != ENDOFCHAIN:
            a = fat[a]
            # only meaningful after the hare took a full second step;
            # in a cycle the hare must eventually land on the tortoise
            if a == b:
                raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
    return sectors
def mini_stream_grow(self):
    """Append one regular sector to the mini stream's fat chain,
    creating the chain (and pointing the root entry at it) when the
    stream does not exist yet."""
    sid = self.next_free_sect()
    if not self.mini_stream_chain:
        # first sector: the root storage entry holds the chain start
        self.mini_stream_chain = [sid]
        self.root.sector_id = sid
    else:
        # link the previous tail to the new sector
        self.fat[self.mini_stream_chain[-1]] = sid
        self.mini_stream_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
if minifat:
sect = self.next_free_minifat_sect()
# logging.debug("creating new mini sector: %d" % sect)
fat = self.minifat
else:
sect = self.next_free_sect()
# logging.debug("creating new sector: %d" % sect)
fat = self.fat
if start_sid is None:
fat[sect] = ENDOFCHAIN
else:
fat_chain = self.get_fat_chain(start_sid, minifat)
assert fat_chain
fat[fat_chain[-1]] = sect
fat[sect] = ENDOFCHAIN
return sect
def free_fat_chain(self, start_sid, minifat=False):
    """Mark every sector of the chain starting at *start_sid* as
    FREESECT and push the freed sids onto the matching free list.

    :param minifat: free sectors from ``self.minifat`` instead of
        ``self.fat``.
    """
    fat = self.fat
    if minifat:
        fat = self.minifat

    for sid in self.get_fat_chain(start_sid, minifat):
        fat[sid] = FREESECT
        # pushed to the front so freed sectors are reused before the
        # file is grown further
        if minifat:
            self.minifat_freelist.insert(0, sid)
        else:
            self.fat_freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
if self.exists(path):
raise ValueError("%s already exists" % path)
dirname = os.path.dirname(path)
basename = os.path.basename(path)
root = self.find(dirname)
if root is None:
raise ValueError("parent dirname does not exist: %s" % dirname)
if not root.type in ('storage', 'root storage'):
raise ValueError("can not add entry to non storage type")
dir_id = self.next_free_dir_id()
logging.debug("next dir id %d" % dir_id)
entry = DirEntry(self, dir_id)
entry.name = basename
entry.type = dir_type
entry.class_id = class_id
# TODO: Implement a Red Black tree
# all new DirEntries are black, so there is no tree balancing.
# AAF Low-Level Container Specification says its alright to do this.
entry.color = 'black'
root.add_child(entry)
self.dir_cache[dir_id] = entry
return entry
def free_dir_entry(self, entry):
    """Release *entry*'s dir_id back to the free list and purge it from
    every cache; the entry's ``dir_id`` is cleared to ``None``."""
    # add freelist
    self.dir_freelist.append(entry.dir_id)

    # remove from dir caches so stale objects cannot be resolved again
    if entry.dir_id in self.dir_cache:
        del self.dir_cache[entry.dir_id]

    if entry.dir_id in self.children_cache:
        del self.children_cache[entry.dir_id]

    # drop any pending (unwritten) modification for this entry
    if entry.dir_id in self.modified:
        del self.modified[entry.dir_id]

    entry.dir_id = None
def rmtree(self, path):
    """
    Removes directory structure, similar to shutil.rmtree.
    """
    # bottom-up walk so children are always released before their parent
    for root, storage, streams in self.walk(path, topdown=False):
        for item in streams:
            # streams smaller than min_stream_max_size live in the minifat
            self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
            self.free_dir_entry(item)
        for item in storage:
            self.free_dir_entry(item)
        # detach the (now freed) child subtree from this storage node
        root.child_id = None
    # remove root item
    self.remove(path)
def listdir(self, path = None):
    """Return the ``DirEntry`` objects contained in the storage given
    by *path* (the root storage when *path* is ``None``)."""
    return self.listdir_dict(path).values()
def listdir_dict(self, path = None):
"""
Return a dict containing the ``DirEntry`` objects in the directory
given by path with name of the dir as key.
"""
if path is None:
path = self.root
root = self.find(path)
if root is None:
raise ValueError("unable to find dir: %s" % str(path))
if not root.isdir():
raise ValueError("can only list storage types")
children = self.children_cache.get(root.dir_id, None)
if children is not None:
return children
child = root.child()
result = {}
if not child:
self.children_cache[root.dir_id] = result
return result
dir_per_sector = self.sector_size // 128
max_dirs_entries = self.dir_sector_count * dir_per_sector
stack = deque([child])
count = 0
while stack:
current = stack.pop()
result[current.name] = current
count += 1
if count > max_dirs_entries:
raise CompoundFileBinaryError("corrupt folder structure")
left = current.left()
if left:
stack.append(left)
right = current.right()
if right:
stack.append(right)
self.children_cache[root.dir_id] = result
return result
def find(self, path):
    """
    find a ``DirEntry`` located at *path*. Returns ``None`` if path
    does not exist.
    """
    # already a resolved entry — pass it straight through
    if isinstance(path, DirEntry):
        return path

    if path == "/":
        return self.root

    split_path = path.lstrip('/').split("/")

    i = 0
    root = self.root

    while True:
        # children keyed by name gives O(1) lookup per path component
        children = self.listdir_dict(root)
        match = children.get(split_path[i], None)

        if match:
            if i == len(split_path) - 1:
                # matched the final component — done
                return match
            # descend into the matched storage and try the next component
            root = match
            i += 1
        else:
            return None
def walk(self, path = None, topdown=True):
"""
Similar to :func:`os.walk`, yeields a 3-tuple ``(root, storage_items, stream_items)``
"""
if path is None:
path = self.root
root = self.find(path)
if not root.isdir():
raise ValueError("can only walk storage types")
if not root.child_id:
return
if topdown:
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for item in storage_items:
for root, storage_items, stream_items in self.walk(item):
yield root, storage_items, stream_items
else:
def topdown_visit_node(root):
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
yield sub_root, sub_storage, sub_stream
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for root_item, storage, stream in topdown_visit_node(root):
yield root_item, storage, stream
def exists(self, path):
    """Return ``True`` when *path* resolves to an existing ``DirEntry``."""
    return self.find(path) is not None
def makedir(self, path, class_id=None):
"""
Create a storage DirEntry name path
"""
return self.create_dir_entry(path, dir_type='storage', class_id=class_id)
def makedirs(self, path):
"""
Recursive storage DirEntry creation function.
"""
root = ""
assert path.startswith('/')
p = path.strip('/')
for item in p.split('/'):
root += "/" + item
if not self.exists(root):
self.makedir(root)
return self.find(path)
def move(self, src, dst):
"""
Moves ``DirEntry`` from src to dst
"""
src_entry = self.find(src)
if src_entry is None:
raise ValueError("src path does not exist: %s" % src)
if dst.endswith('/'):
dst += src_entry.name
if self.exists(dst):
raise ValueError("dst path already exist: %s" % dst)
if dst == '/' or src == '/':
raise ValueError("cannot overwrite root dir")
split_path = dst.strip('/').split('/')
dst_basename = split_path[-1]
dst_dirname = '/' + '/'.join(split_path[:-1])
# print(dst)
# print(dst_basename, dst_dirname)
dst_entry = self.find(dst_dirname)
if dst_entry is None:
raise ValueError("src path does not exist: %s" % dst_dirname)
if not dst_entry.isdir():
raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
# src_entry.parent.remove_child(src_entry)
src_entry.pop()
src_entry.parent = None
src_entry.name = dst_basename
dst_entry.add_child(src_entry)
self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
return src_entry
def open(self, path, mode='r'):
"""Open stream, returning ``Stream`` object"""
entry = self.find(path)
if entry is None:
if mode == 'r':
raise ValueError("stream does not exists: %s" % path)
entry = self.create_dir_entry(path, 'stream', None)
else:
if not entry.isfile():
raise ValueError("can only open stream type DirEntry's")
if mode == 'w':
logging.debug("stream: %s exists, overwriting" % path)
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
entry.sector_id = None
entry.byte_size = 0
entry.class_id = None
elif mode == 'rw':
pass
s = Stream(self, entry, mode)
return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.rmtree
|
python
|
def rmtree(self, path):
for root, storage, streams in self.walk(path, topdown=False):
for item in streams:
self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
self.free_dir_entry(item)
for item in storage:
self.free_dir_entry(item)
root.child_id = None
# remove root item
self.remove(path)
|
Removes directory structure, similar to shutil.rmtree.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1632-L1648
|
[
"def free_fat_chain(self, start_sid, minifat=False):\n fat =self.fat\n if minifat:\n fat = self.minifat\n\n for sid in self.get_fat_chain(start_sid, minifat):\n fat[sid] = FREESECT\n if minifat:\n self.minifat_freelist.insert(0, sid)\n else:\n self.fat_freelist.insert(0, sid)\n",
"def free_dir_entry(self, entry):\n\n # add freelist\n self.dir_freelist.append(entry.dir_id)\n\n # remove from dir caches\n if entry.dir_id in self.dir_cache:\n del self.dir_cache[entry.dir_id]\n\n if entry.dir_id in self.children_cache:\n del self.children_cache[entry.dir_id]\n\n if entry.dir_id in self.modified:\n del self.modified[entry.dir_id]\n\n entry.dir_id = None\n",
"def remove(self, path):\n \"\"\"\n Removes both streams and storage DirEntry types from file.\n storage type entries need to be empty dirs.\n \"\"\"\n\n entry = self.find(path)\n\n if not entry:\n raise ValueError(\"%s does not exists\" % path)\n\n if entry.type == 'root storage':\n raise ValueError(\"can no remove root entry\")\n\n if entry.type == \"storage\" and not entry.child_id is None:\n raise ValueError(\"storage contains children\")\n\n entry.pop()\n\n # remove stream data\n if entry.type == \"stream\":\n self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)\n\n self.free_dir_entry(entry)\n",
"def walk(self, path = None, topdown=True):\n \"\"\"\n Similar to :func:`os.walk`, yeields a 3-tuple ``(root, storage_items, stream_items)``\n \"\"\"\n\n if path is None:\n path = self.root\n\n root = self.find(path)\n\n if not root.isdir():\n raise ValueError(\"can only walk storage types\")\n\n if not root.child_id:\n return\n\n if topdown:\n storage_items = []\n stream_items = []\n\n for item in self.listdir(root):\n if item.isdir():\n storage_items.append(item)\n else:\n stream_items.append(item)\n\n yield root, storage_items, stream_items\n\n for item in storage_items:\n for root, storage_items, stream_items in self.walk(item):\n yield root, storage_items, stream_items\n else:\n\n def topdown_visit_node(root):\n storage_items = []\n stream_items = []\n for item in self.listdir(root):\n if item.isdir():\n for sub_root, sub_storage, sub_stream in topdown_visit_node(item):\n yield sub_root, sub_storage, sub_stream\n\n storage_items.append(item)\n else:\n stream_items.append(item)\n\n yield root, storage_items, stream_items\n\n for root_item, storage, stream in topdown_visit_node(root):\n yield root_item, storage, stream\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
if sector_size == 4096:
self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
elif sector_size == 512:
self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
else:
raise ValueError("sector size must be 4096 or 512")
self.major_version = 4
self.minor_version = 62
self.byte_order = "le"
self.sector_size = sector_size
self.mini_stream_sector_size = 64
self.dir_sector_count = 1
self.fat_sector_count = 1
self.dir_sector_start = 0
self.transaction_signature = 1
self.min_stream_max_size = 4096
self.minifat_sector_start = FREESECT
self.minifat_sector_count = 0
self.difat_sector_start = FREESECT
self.difat_sector_count = 0
self.difat = [[]]
for i in range(109):
self.difat[0].append(FREESECT)
self.difat[0][0] = 1
for i in range(self.sector_size // 4):
self.fat.append(FREESECT)
if i > 1:
self.fat_freelist.append(i)
self.fat[0] = ENDOFCHAIN # end of dir chain
self.fat[self.difat[0][0]] = FATSECT
self.root = DirEntry(self, 0)
self.root.name = 'Root Entry'
self.root.sector_id = None
self.root.type = 'root storage'
self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
self.dir_cache[0] = self.root
self.dir_fat_chain = [0]
# raise NotImplementedError("mode: %s supported not implemented" % self.f.mode)
def write_header(self):
logging.debug("writiing header")
f = self.f
f.seek(0)
f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
f.write(self.class_id.bytes_le)
write_u16le(f, self.minor_version)
write_u16le(f, self.major_version)
write_u16le(f, 0xFFFE) #byte order le
write_u16le(f, int(math.log(self.sector_size, 2)))
write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
f.write(b'\0' * 6) #skip reseverd
write_u32le(f, self.dir_sector_count)
write_u32le(f, self.fat_sector_count)
write_u32le(f, self.dir_sector_start)
write_u32le(f, self.transaction_signature)
write_u32le(f, self.min_stream_max_size)
write_u32le(f, self.minifat_sector_start)
write_u32le(f, self.minifat_sector_count)
write_u32le(f, self.difat_sector_start)
write_u32le(f, self.difat_sector_count)
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
def read_header(self):
    """Parse the CFB header at offset 0 and load the DIFAT: the 109
    header entries plus any chained overflow DIFAT sectors (each
    overflow sector's last entry links to the next).

    :raises NotImplementedError: for big-endian files or unsupported
        sector sizes (only 4096/512 regular and 64-byte mini sectors
        are handled).
    """
    f = self.f
    f.seek(0)
    magic = f.read(8)
    logging.debug("magic: %s" % str([magic]))
    self.class_id = auid.AUID(bytes_le=f.read(16))
    logging.debug("clsid: %s" % str(self.class_id))
    self.minor_version = read_u16le(f)
    logging.debug("minor_version: %d" % self.minor_version)
    self.major_version = read_u16le(f)
    logging.debug("major_version: %d" % self.major_version)
    byte_order = read_u16le(f)
    if byte_order == 0xFFFE:
        self.byte_order = 'le'
    else:
        raise NotImplementedError("endian format:0x%X not supported" % byte_order)
    logging.debug("byte_order: %s" % self.byte_order)
    # sector sizes are stored as powers of two
    size = read_u16le(f)
    self.sector_size = pow(2, size)
    logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
    size = read_u16le(f)
    self.mini_stream_sector_size = pow(2, size)
    logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
    if not self.sector_size in (4096, 512):
        raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
    if self.mini_stream_sector_size != 64:
        raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
    f.read(6) #skip reseverd
    self.dir_sector_count = read_u32le(f)
    logging.debug("dir_sector_count: %d" % self.dir_sector_count)
    self.fat_sector_count = read_u32le(f)
    logging.debug("fat_sector_count: %d" % self.fat_sector_count)
    self.dir_sector_start = read_u32le(f)
    logging.debug("dir_sector_start: %d" % self.dir_sector_start)
    self.transaction_signature = read_u32le(f)
    logging.debug("transaction_signature: %d" % self.transaction_signature)
    self.min_stream_max_size = read_u32le(f)
    logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
    self.minifat_sector_start = read_u32le(f)
    logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
    self.minifat_sector_count = read_u32le(f)
    logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
    self.difat_sector_start = read_u32le(f)
    logging.debug("difat_sector_start: %d" % self.difat_sector_start)
    self.difat_sector_count = read_u32le(f)
    logging.debug("difat_sector_count: %d" % self.difat_sector_count)
    self.difat = [[]]
    logging.debug("reading header difat at %d" % f.tell())
    for i in range(109):
        item = read_u32le(f)
        self.difat[0].append(item)
    sectors_left = self.difat_sector_count
    sid = self.difat_sector_start
    # reading difat sectors; a special marker (FREESECT/ENDOFCHAIN/...)
    # terminates the chain early
    while sectors_left:
        logging.debug("reading difat sid: %d", sid)
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            break
        self.difat_chain.append(sid)
        f.seek((sid + 1) * self.sector_size)
        difat = []
        for i in range( (self.sector_size // 4)):
            item = read_u32le(f)
            difat.append(item)
        self.difat.append(difat)
        # last entry of each difat sector is the sid of the next one
        sid = difat[-1]
        logging.debug("next difat: %d" % sid)
        sectors_left -= 1
def iter_difat(self):
    """Yield ``(table_index, entry_index, sector_id)`` for every usable
    DIFAT entry: all 109 header entries first, then each overflow DIFAT
    sector's entries excluding its trailing next-sector link."""
    header, *overflow = self.difat
    for slot, sector in enumerate(header):
        yield 0, slot, sector
    for table_num, table in enumerate(overflow, start=1):
        for slot, sector in enumerate(table[:-1]):
            yield table_num, slot, sector
def write_difat(self):
    """Write the DIFAT: the 109 header entries at offset 76 (zero
    padding the rest of the header sector), then each overflow DIFAT
    table into its own sector, following the trailing next-sector
    links."""
    f = self.f
    # write header entries
    f.seek(76)
    logging.debug("writing header difat")
    for i in range(109):
        write_u32le(f, self.difat[0][i])
    for i in range(self.sector_size - f.tell()):
        f.write(b'\0')
    if self.difat_sector_count == 0:
        return
    sid = self.difat_sector_start
    assert len(self.difat[1:]) == self.difat_sector_count
    for table in self.difat[1:]:
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            raise IOError("bad difat sector type")
        pos = (sid + 1) * self.sector_size
        logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
        f.seek(pos)
        for i in range(self.sector_size // 4):
            write_u32le(f, table[i])
        # last entry of each table is the sid of the next difat sector
        sid = table[-1]
def read_fat(self):
    """Read every FAT sector referenced by the DIFAT into ``self.fat``
    and rebuild the free-sector list.

    Trusts the DIFAT over the header's ``fat_sector_count`` when they
    disagree. Warns (but does not fail) if the 4k range-lock sector
    holds data.
    """
    f = self.f
    self.fat = array(str('I'))
    sector_count = 0
    fat_sectors = []
    for t, i, sid in self.iter_difat():
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            # markers (FREESECT etc.) are not FAT sector ids
            continue
        fat_sectors.append(sid)
    if len(fat_sectors) != self.fat_sector_count:
        # logging.warn is a deprecated alias; use logging.warning
        logging.warning("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
        self.fat_sector_count = len(fat_sectors)
    for sid in fat_sectors:
        pos = (sid + 1) * self.sector_size
        f.seek(pos)
        extend_sid_table(f, self.fat, self.sector_size)
        sector_count += 1
    if sys.byteorder == 'big':
        # entries are little endian on disk
        self.fat.byteswap()
    for i,v in enumerate(self.fat):
        if v == FREESECT:
            self.fat_freelist.append(i)
    logging.debug("read %d fat sectors ", sector_count)
    if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
        if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
            logging.warning("range lock sector has data")
def write_fat(self):
    """Write the in-memory FAT back to the sectors listed in the DIFAT,
    one packed little-endian u32 table per sector."""
    logging.debug("writing fat")
    f = self.f
    sector_count = 0
    # fat must be an exact whole number of sectors
    assert len(self.fat)*4 % self.sector_size == 0
    fat_sectors = []
    for t, i, sid in self.iter_difat():
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            continue
        fat_sectors.append(sid)
    # check that the difat has enough entries to hold the current fat
    assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
    element_count = self.sector_size // 4
    fat_table_struct = Struct(str('<%dI' % element_count))
    for i, sid in enumerate(fat_sectors):
        f.seek((sid + 1) * self.sector_size)
        start = i * element_count
        end = start + element_count
        f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
    """Read the miniFAT by following its FAT chain, rebuild the
    mini-sector free list, and return the implied mini stream size in
    bytes (through the last used mini sector)."""
    f = self.f
    sector_count = 0
    self.minifat = array(str('I'))
    for sid in self.get_fat_chain(self.minifat_sector_start):
        self.minifat_chain.append(sid)
        f.seek((sid + 1) * self.sector_size)
        extend_sid_table(f, self.minifat, self.sector_size)
        sector_count += 1
    if sys.byteorder == 'big':
        # entries are little endian on disk
        self.minifat.byteswap()
    last_used_sector = 0
    for i,v in enumerate(self.minifat):
        if v == FREESECT:
            self.minifat_freelist.append(i)
        else:
            last_used_sector = i
    # size is measured up to the last used mini sector, not the count
    # of non-free entries
    mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
    logging.debug("read %d mini fat sectors", sector_count)
    return mini_stream_byte_size
def write_minifat(self):
    """Write the in-memory miniFAT tables back into the sectors of the
    miniFAT's own FAT chain, one packed u32 table per sector."""
    f = self.f
    sector_count = 0
    element_count = self.sector_size // 4
    fat_table_struct = Struct(str('<%dI' % element_count))
    for i, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
        pos = (sid + 1) * self.sector_size
        f.seek(pos)
        start = i * element_count
        end = start + element_count
        f.write(fat_table_struct.pack(*self.minifat[start:end]))
def write_modified_dir_entries(self):
    """Flush every dirty DirEntry in ``self.modified`` to its 128-byte
    slot in the directory stream, then clear the modified set."""
    f = self.f
    for dir_id in sorted(self.modified):
        entry = self.modified[dir_id]
        stream_pos = entry.dir_id * 128
        chain_index = stream_pos // self.sector_size
        sid_offset = stream_pos % self.sector_size
        sid = self.dir_fat_chain[chain_index]
        pos = ((sid + 1) * self.sector_size) + sid_offset
        f.seek(pos)
        f.write(entry.data)
        # invalidate cached sector so later reads see the new bytes
        if sid in self.sector_cache:
            del self.sector_cache[sid]
    self.modified = {}
def write_dir_entries(self):
    """Flush dirty DirEntries, then blank the 128-byte slots of every
    freed directory id."""
    self.write_modified_dir_entries()
    # clear empty DirEntrys
    empty_dir = bytearray(128)
    f = self.f
    self.dir_freelist.sort()
    for dir_id in self.dir_freelist:
        stream_pos = dir_id * 128
        chain_index = stream_pos // self.sector_size
        sid_offset = stream_pos % self.sector_size
        sid = self.dir_fat_chain[chain_index]
        pos = ((sid + 1) * self.sector_size) + sid_offset
        f.seek(pos)
        f.write(empty_dir)
def next_free_minifat_sect(self):
    """Return a free miniFAT sector index, growing the mini stream as
    needed and extending the miniFAT by one regular sector when the
    free list is exhausted."""
    idx_per_sect = self.sector_size // self.mini_stream_sector_size
    stream_sects = len(self.mini_stream_chain) * idx_per_sect
    if self.minifat_freelist:
        i = self.minifat_freelist.pop(0)
        assert self.minifat[i] == FREESECT
        # ensure the mini stream has backing storage for this index
        if i+1 > stream_sects:
            self.mini_stream_grow()
        return i
    # if we got here need to add aditional fat
    sid = self.next_free_sect()
    idx_start = len(self.minifat)
    idx_end = idx_start + self.sector_size // 4
    self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
    self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
    if self.minifat_sector_count == 0:
        self.minifat_sector_count = 1
        self.minifat_sector_start = sid
    else:
        self.minifat_sector_count += 1
        # chain the new sector after the last miniFAT sector
        self.fat[self.minifat_chain[-1]] = sid
    self.minifat_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
    return self.next_free_minifat_sect()
def next_free_sect(self):
    """Return the id of a free FAT sector, growing the FAT (and, when
    its DIFAT bookkeeping is full, the DIFAT) as needed. The caller is
    responsible for marking the returned sector as used in ``self.fat``.
    """
    if self.fat_freelist:
        i = self.fat_freelist.pop(0)
        assert self.fat[i] == FREESECT
        # Handle Range Lock Sector: with 4096-byte sectors this sector
        # overlays file offsets 0x7FFFFF00-0x7FFFFFFF and must never
        # hold data
        if i == RANGELOCKSECT and self.sector_size == 4096:
            self.fat[i] = ENDOFCHAIN
            logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
            return self.next_free_sect()
        return i
    # if we got here need to add aditional fat
    difat_table = None
    difat_index = None
    for t, i, v in self.iter_difat():
        if v == FREESECT:
            difat_table = t
            difat_index = i
            break
    new_difat_sect = None
    if difat_index is None:
        # DIFAT itself is full: append a new DIFAT sector
        new_difat_sect = len(self.fat) + 1
        logging.debug("adding new difat to sid: %d" % new_difat_sect)
        if self.difat_sector_count == 0:
            self.difat_sector_start = new_difat_sect
            self.difat_sector_count = 1
        else:
            # chain from the previous DIFAT sector's trailing link
            self.difat[-1][-1] = new_difat_sect
            self.difat_sector_count += 1
        # add difat table
        difat = []
        for i in range(self.sector_size // 4):
            difat.append(FREESECT)
        # BUGFIX: was `difat[-1] == ENDOFCHAIN`, a no-op comparison.
        # The table's last entry is the next-DIFAT-sector link and must
        # be terminated with ENDOFCHAIN per MS-CFB.
        difat[-1] = ENDOFCHAIN
        self.difat.append(difat)
        for t, i, v in self.iter_difat():
            if v == FREESECT:
                difat_table = t
                difat_index = i
                break
    new_fat_sect = len(self.fat)
    self.difat[difat_table][difat_index] = new_fat_sect
    # grow fat entries
    idx_start = len(self.fat)
    idx_end = idx_start + (self.sector_size // 4)
    self.fat.extend([FREESECT for i in range(self.sector_size // 4)])
    non_free_sids = set([new_fat_sect, new_difat_sect])
    # Handle Range Lock Sector
    # The range lock sector is the sector
    # that covers file offsets 0x7FFFFF00-0x7FFFFFFF in the file
    if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
        non_free_sids.add(RANGELOCKSECT)
        logging.debug("adding range lock")
        self.fat[RANGELOCKSECT] = ENDOFCHAIN
    freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
    self.fat_freelist.extend(freelist)
    self.fat[new_fat_sect] = FATSECT
    self.fat_sector_count += 1
    if not new_difat_sect is None:
        self.fat[new_difat_sect] = DIFSECT
    return self.next_free_sect()
def read_sector_data(self, sid):
    """Return the bytes of sector *sid*, served from the LRU sector
    cache when possible."""
    sector_data = self.sector_cache.get(sid, None)
    if sector_data is not None:
        return sector_data
    else:
        pos = (sid + 1) * self.sector_size
        self.f.seek(pos)
        sector_data = bytearray(self.sector_size)
        #NOTE: if requested sector doesn't exist or is truncated the
        # result is zero padded -- this is the expected behaviour
        bytes_read = self.f.readinto(sector_data)
        self.sector_cache[sid] = sector_data
        return sector_data
def get_sid_offset(self, abs_pos):
    """Map an absolute file position to ``(sector_id, offset)``.

    The header occupies the file's first sector, so the FAT sector id
    is one less than the raw file-sector index.
    """
    file_sector = abs_pos // self.sector_size
    within = abs_pos % self.sector_size
    return file_sector - 1, within
def dir_entry_sid_offset(self, dir_id):
    """Return ``(sector_id, byte_offset)`` of the 128-byte directory
    entry *dir_id* within the directory stream's FAT chain."""
    byte_pos = dir_id * 128
    entry_sector = self.dir_fat_chain[byte_pos // self.sector_size]
    return entry_sector, byte_pos % self.sector_size
def dir_entry_pos(self, dir_id):
    """Return the absolute file position of directory entry *dir_id*
    (the +1 skips the header sector)."""
    sector, offset = self.dir_entry_sid_offset(dir_id)
    return (sector + 1) * self.sector_size + offset
def read_dir_entry(self, dir_id, parent = None):
    """Return the ``DirEntry`` for *dir_id* (``None`` passes through),
    reading its 128 bytes from the directory stream and caching the
    result in the weak dir cache."""
    if dir_id is None:
        return None
    entry = self.dir_cache.get(dir_id, None)
    if entry is not None:
        return entry
    stream_pos = dir_id * 128
    chain_index = stream_pos // self.sector_size
    sid_offset = stream_pos % self.sector_size
    sid = self.dir_fat_chain[chain_index]
    sector_data = self.read_sector_data(sid)
    data= bytearray(sector_data[sid_offset:sid_offset+128])
    entry = DirEntry(self, dir_id, data=data)
    entry.parent = parent
    self.dir_cache[dir_id] = entry
    return entry
def clear_sector(self, sid):
    """Overwrite sector *sid* with zeros (offset is shifted by one
    sector because the header occupies the first)."""
    zeros = bytearray(self.sector_size)
    self.f.seek((sid + 1) * self.sector_size)
    self.f.write(zeros)
def next_free_dir_id(self):
    """Return a free directory id, extending the directory stream by
    one sector (a batch of 128-byte slots) when the free list is
    empty."""
    # use free list first
    if self.dir_freelist:
        return self.dir_freelist.pop(0)
    f = self.f
    sect = self.fat_chain_append(self.dir_fat_chain[-1])
    self.dir_fat_chain.append(sect)
    self.dir_sector_count += 1
    # every new sector adds sector_size/128 directory slots
    first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
    last_dir_id = first_dir_id + (self.sector_size // 128)
    self.dir_freelist.extend(range(first_dir_id, last_dir_id))
    return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
    """Return the list of sector ids chained from *start_sid* in the
    FAT (or miniFAT when ``minifat`` is true).

    :raises CompoundFileBinaryError: if the chain contains a cycle.
    """
    fat = self.fat
    fat_name = "FAT"
    if minifat:
        fat = self.minifat
        fat_name = "MINIFAT"
    # Floyd's Tortoise and Hare cycle-finding algorithm
    a = start_sid
    b = start_sid
    sectors = []
    # special markers mean "no chain"
    if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
        return []
    while b != ENDOFCHAIN:
        sectors.append(b)
        b = fat[b]
        # a (the hare) advances two steps per b step; meeting => cycle
        if a != ENDOFCHAIN:
            a = fat[a]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a == b:
            raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
    return sectors
def mini_stream_grow(self):
    """Append one regular sector to the mini stream's FAT chain,
    starting the chain (and the root entry's sector_id) if needed."""
    sid = self.next_free_sect()
    if not self.mini_stream_chain:
        self.mini_stream_chain = [sid]
        # the mini stream's data lives in the root entry's chain
        self.root.sector_id = sid
    else:
        self.fat[self.mini_stream_chain[-1]] = sid
        self.mini_stream_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
    """Allocate a new (mini)sector, link it to the end of the chain
    starting at *start_sid* (or start a new chain when ``None``) and
    return its sector id."""
    if minifat:
        sect = self.next_free_minifat_sect()
        fat = self.minifat
    else:
        sect = self.next_free_sect()
        fat = self.fat
    if start_sid is None:
        fat[sect] = ENDOFCHAIN
    else:
        fat_chain = self.get_fat_chain(start_sid, minifat)
        assert fat_chain
        fat[fat_chain[-1]] = sect
        fat[sect] = ENDOFCHAIN
    return sect
def free_fat_chain(self, start_sid, minifat=False):
    """Mark every sector of the chain starting at *start_sid* as
    FREESECT and push the ids onto the front of the matching free list
    so they are reused first."""
    fat =self.fat
    if minifat:
        fat = self.minifat
    for sid in self.get_fat_chain(start_sid, minifat):
        fat[sid] = FREESECT
        if minifat:
            self.minifat_freelist.insert(0, sid)
        else:
            self.fat_freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
    """Create a new ``DirEntry`` at *path* under its (existing) parent
    storage and return it.

    :raises ValueError: if *path* already exists, or the parent is
        missing or not a storage type.
    """
    if self.exists(path):
        raise ValueError("%s already exists" % path)
    dirname = os.path.dirname(path)
    basename = os.path.basename(path)
    root = self.find(dirname)
    if root is None:
        raise ValueError("parent dirname does not exist: %s" % dirname)
    if not root.type in ('storage', 'root storage'):
        raise ValueError("can not add entry to non storage type")
    dir_id = self.next_free_dir_id()
    logging.debug("next dir id %d" % dir_id)
    entry = DirEntry(self, dir_id)
    entry.name = basename
    entry.type = dir_type
    entry.class_id = class_id
    # TODO: Implement a Red Black tree
    # all new DirEntries are black, so there is no tree balancing.
    # AAF Low-Level Container Specification says its alright to do this.
    entry.color = 'black'
    root.add_child(entry)
    self.dir_cache[dir_id] = entry
    return entry
def free_dir_entry(self, entry):
    """Return *entry*'s directory id to the free list, purge it from
    every cache, and clear its ``dir_id``."""
    # add freelist
    self.dir_freelist.append(entry.dir_id)
    # remove from dir caches
    if entry.dir_id in self.dir_cache:
        del self.dir_cache[entry.dir_id]
    if entry.dir_id in self.children_cache:
        del self.children_cache[entry.dir_id]
    if entry.dir_id in self.modified:
        del self.modified[entry.dir_id]
    entry.dir_id = None
def remove(self, path):
    """
    Remove a stream or (empty) storage ``DirEntry`` from the file.
    Stream entries also have their sector chain released back to the
    FAT/miniFAT free lists.

    :raises ValueError: if *path* does not exist, is the root entry,
        or is a storage entry that still has children.
    """
    entry = self.find(path)
    if not entry:
        # fixed message grammar ("does not exists")
        raise ValueError("%s does not exist" % path)
    if entry.type == 'root storage':
        # fixed message typo ("can no remove")
        raise ValueError("cannot remove root entry")
    if entry.type == "storage" and not entry.child_id is None:
        raise ValueError("storage contains children")
    # detach from the sibling tree
    entry.pop()
    # remove stream data; streams below min_stream_max_size live in the
    # mini stream and use the miniFAT
    if entry.type == "stream":
        self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
    self.free_dir_entry(entry)
def listdir(self, path = None):
    """
    Return a list containing the ``DirEntry`` objects in the directory
    given by path.
    """
    # materialize as a real list: in python 3 dict.values() is a live
    # view, which contradicts the documented return type and would
    # change under later directory mutation
    return list(self.listdir_dict(path).values())
def listdir_dict(self, path = None):
    """
    Return a dict containing the ``DirEntry`` objects in the directory
    given by path with name of the dir as key.
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if root is None:
        raise ValueError("unable to find dir: %s" % str(path))
    if not root.isdir():
        raise ValueError("can only list storage types")
    children = self.children_cache.get(root.dir_id, None)
    if children is not None:
        return children
    child = root.child()
    result = {}
    if not child:
        self.children_cache[root.dir_id] = result
        return result
    # upper bound on entry count guards against corrupt sibling trees
    dir_per_sector = self.sector_size // 128
    max_dirs_entries = self.dir_sector_count * dir_per_sector
    # iterative traversal of the red-black sibling tree
    stack = deque([child])
    count = 0
    while stack:
        current = stack.pop()
        result[current.name] = current
        count += 1
        if count > max_dirs_entries:
            raise CompoundFileBinaryError("corrupt folder structure")
        left = current.left()
        if left:
            stack.append(left)
        right = current.right()
        if right:
            stack.append(right)
    self.children_cache[root.dir_id] = result
    return result
def find(self, path):
    """
    find a ``DirEntry`` located at *path*. Returns ``None`` if path
    does not exist. A ``DirEntry`` argument passes through unchanged.
    """
    if isinstance(path, DirEntry):
        return path
    if path == "/":
        return self.root
    split_path = path.lstrip('/').split("/")
    i = 0
    root = self.root
    # walk one path component at a time through the children maps
    while True:
        children = self.listdir_dict(root)
        match = children.get(split_path[i], None)
        if match:
            if i == len(split_path) - 1:
                return match
            root = match
            i += 1
        else:
            return None
def walk(self, path = None, topdown=True):
    """
    Similar to :func:`os.walk`, yields a 3-tuple
    ``(root, storage_items, stream_items)``. With ``topdown=False``
    children are yielded before their parent.
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if not root.isdir():
        raise ValueError("can only walk storage types")
    if not root.child_id:
        return
    if topdown:
        storage_items = []
        stream_items = []
        for item in self.listdir(root):
            if item.isdir():
                storage_items.append(item)
            else:
                stream_items.append(item)
        yield root, storage_items, stream_items
        for item in storage_items:
            for root, storage_items, stream_items in self.walk(item):
                yield root, storage_items, stream_items
    else:
        # NOTE(review): despite its name this nested generator performs
        # a bottom-up traversal (children first)
        def topdown_visit_node(root):
            storage_items = []
            stream_items = []
            for item in self.listdir(root):
                if item.isdir():
                    for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
                        yield sub_root, sub_storage, sub_stream
                    storage_items.append(item)
                else:
                    stream_items.append(item)
            yield root, storage_items, stream_items
        for root_item, storage, stream in topdown_visit_node(root):
            yield root_item, storage, stream
def exists(self, path):
    """
    Return ``True`` if path refers to a existing path.
    """
    return self.find(path) is not None
def makedir(self, path, class_id=None):
    """Create and return a new 'storage' ``DirEntry`` at *path*.

    :param class_id: optional class AUID stamped on the new entry.
    """
    entry = self.create_dir_entry(path, dir_type='storage', class_id=class_id)
    return entry
def makedirs(self, path):
    """Recursively create storage DirEntries along *path* (must be
    absolute), skipping components that already exist, and return the
    final entry."""
    assert path.startswith('/')
    partial = ""
    for component in path.strip('/').split('/'):
        partial = "%s/%s" % (partial, component)
        if not self.exists(partial):
            self.makedir(partial)
    return self.find(path)
def move(self, src, dst):
    """
    Moves ``DirEntry`` from src to dst and returns it. A dst ending in
    ``/`` keeps the source entry's name.

    :raises ValueError: if src is missing, dst already exists, either
        is the root, or the dst parent is missing or a stream.
    """
    src_entry = self.find(src)
    if src_entry is None:
        raise ValueError("src path does not exist: %s" % src)
    if dst.endswith('/'):
        dst += src_entry.name
    if self.exists(dst):
        raise ValueError("dst path already exist: %s" % dst)
    if dst == '/' or src == '/':
        raise ValueError("cannot overwrite root dir")
    split_path = dst.strip('/').split('/')
    dst_basename = split_path[-1]
    dst_dirname = '/' + '/'.join(split_path[:-1])
    dst_entry = self.find(dst_dirname)
    if dst_entry is None:
        raise ValueError("src path does not exist: %s" % dst_dirname)
    if not dst_entry.isdir():
        raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
    # detach from the old sibling tree, rename, re-attach under dst
    src_entry.pop()
    src_entry.parent = None
    src_entry.name = dst_basename
    dst_entry.add_child(src_entry)
    self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
    return src_entry
def open(self, path, mode='r'):
    """Open stream, returning ``Stream`` object.

    Mode 'r' requires the stream to exist, 'w' creates or truncates,
    'rw' opens an existing stream or creates one.
    """
    entry = self.find(path)
    if entry is None:
        if mode == 'r':
            raise ValueError("stream does not exists: %s" % path)
        entry = self.create_dir_entry(path, 'stream', None)
    else:
        if not entry.isfile():
            raise ValueError("can only open stream type DirEntry's")
        if mode == 'w':
            logging.debug("stream: %s exists, overwriting" % path)
            # release old data; small streams live in the mini stream
            self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
            entry.sector_id = None
            entry.byte_size = 0
            entry.class_id = None
        elif mode == 'rw':
            pass
    s = Stream(self, entry, mode)
    return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.listdir_dict
|
python
|
def listdir_dict(self, path = None):
if path is None:
path = self.root
root = self.find(path)
if root is None:
raise ValueError("unable to find dir: %s" % str(path))
if not root.isdir():
raise ValueError("can only list storage types")
children = self.children_cache.get(root.dir_id, None)
if children is not None:
return children
child = root.child()
result = {}
if not child:
self.children_cache[root.dir_id] = result
return result
dir_per_sector = self.sector_size // 128
max_dirs_entries = self.dir_sector_count * dir_per_sector
stack = deque([child])
count = 0
while stack:
current = stack.pop()
result[current.name] = current
count += 1
if count > max_dirs_entries:
raise CompoundFileBinaryError("corrupt folder structure")
left = current.left()
if left:
stack.append(left)
right = current.right()
if right:
stack.append(right)
self.children_cache[root.dir_id] = result
return result
|
Return a dict containing the ``DirEntry`` objects in the directory
given by path with name of the dir as key.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1660-L1709
|
[
"def find(self, path):\n \"\"\"\n find a ``DirEntry`` located at *path*. Returns ``None`` if path\n does not exist.\n \"\"\"\n\n if isinstance(path, DirEntry):\n return path\n\n if path == \"/\":\n return self.root\n\n split_path = path.lstrip('/').split(\"/\")\n\n i = 0\n root = self.root\n\n while True:\n\n children = self.listdir_dict(root)\n match = children.get(split_path[i], None)\n\n if match:\n if i == len(split_path) - 1:\n return match\n root = match\n i += 1\n else:\n return None\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
    """Open or create a Compound File Binary over *file_object*.

    Read modes parse the header, FAT, miniFAT and directory; other
    modes initialize an empty container and write the header. A
    ``BytesIO`` object is always treated as mode 'wb+'.

    :param sector_size: 4096 or 512, used only when creating.
    """
    self.f = file_object
    self.difat = [[]]
    self.fat = array(str('I'))
    self.fat_freelist = []
    self.minifat = array(str('I'))
    self.minifat_freelist = []
    self.difat_chain = []
    self.minifat_chain = []
    self.dir_fat_chain = []
    self.mini_stream_chain = []
    self.modified = {}
    self.sector_cache = LRUCacheDict()
    # weak refs so unused DirEntries can be collected
    self.dir_cache = weakref.WeakValueDictionary()
    self.children_cache = LRUCacheDict()
    self.dir_freelist = []
    self.debug_grow = False
    self.is_open = True
    if isinstance(self.f, BytesIO):
        self.mode = 'wb+'
    else:
        self.mode = mode
    if self.mode in ("r", "r+", "rb", 'rb+'):
        self.read_header()
        self.read_fat()
        mini_stream_byte_size = self.read_minifat()
        # create dir_fat_chain and read root dir entry
        self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
        if len(self.dir_fat_chain) != self.dir_sector_count:
            logging.info("read dir_sector_count missmatch, using fat chain length")
            self.dir_sector_count = len(self.dir_fat_chain)
        logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
        self.root = self.read_dir_entry(0)
        self.dir_cache[0] = self.root
        # create mini stream fat chain
        if self.minifat_sector_count:
            self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
            if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
                message = "mini stream size missmatch: %d != %d, using size from minifat"
                # logging.warn is a deprecated alias; use logging.warning
                logging.warning(message % (self.root.byte_size, mini_stream_byte_size))
    else:
        self.setup_empty(sector_size)
        self.write_header()
        logging.debug("pos: %d" % self.f.tell())
        logging.debug("writing root dir sector")
        self.root.write()
        self.f.write(bytearray(self.sector_size - 128))
        self.write_fat()
def close(self):
    """Flush all metadata (header, DIFAT, FAT, miniFAT, dir entries)
    and truncate trailing free sectors. No-op for read-only modes."""
    if self.mode in ("r", "rb"):
        return
    # caculate mini stream size
    if self.root.sector_id is not None:
        # I cannot find this documented anywhere but the size of the mini stream
        # is the size up to the last mini sector is uses. Not the total Non FREESECT's.
        # If self.root.byte_size is not set correctly the some appications will crash hard...
        # find last non-free sect
        for i,v in enumerate(reversed(self.minifat)):
            if v != FREESECT:
                break
        last_used_sector_id = len(self.minifat) - i
        mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
        self.root.byte_size = mini_stream_byte_size
        # Truncate ministream
        s = Stream(self, self.root, 'rw')
        s.truncate(mini_stream_byte_size)
    self.write_header()
    self.write_difat()
    self.write_fat()
    self.write_minifat()
    self.write_dir_entries()
    # Truncate file to the last free sector
    for i,v in enumerate(reversed(self.fat)):
        if v != FREESECT:
            break
    last_used_sector_id = len(self.fat) - i
    pos = (last_used_sector_id + 1) * self.sector_size
    self.f.seek(pos)
    self.f.truncate()
    self.is_open = False
def setup_empty(self, sector_size):
    """Initialize in-memory state for a brand new container: header
    fields, one FAT sector, the root ``DirEntry`` and its dir chain.

    :param sector_size: 4096 or 512.
    :raises ValueError: for any other sector size.
    """
    # class id differs by sector size
    if sector_size == 4096:
        self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
    elif sector_size == 512:
        self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
    else:
        raise ValueError("sector size must be 4096 or 512")
    self.major_version = 4
    self.minor_version = 62
    self.byte_order = "le"
    self.sector_size = sector_size
    self.mini_stream_sector_size = 64
    self.dir_sector_count = 1
    self.fat_sector_count = 1
    self.dir_sector_start = 0
    self.transaction_signature = 1
    self.min_stream_max_size = 4096
    self.minifat_sector_start = FREESECT
    self.minifat_sector_count = 0
    self.difat_sector_start = FREESECT
    self.difat_sector_count = 0
    # header DIFAT: 109 entries, first one points at the initial FAT
    # sector (sid 1)
    self.difat = [[]]
    for i in range(109):
        self.difat[0].append(FREESECT)
    self.difat[0][0] = 1
    # sids 0 (dir) and 1 (fat) are in use; everything else is free
    for i in range(self.sector_size // 4):
        self.fat.append(FREESECT)
        if i > 1:
            self.fat_freelist.append(i)
    self.fat[0] = ENDOFCHAIN # end of dir chain
    self.fat[self.difat[0][0]] = FATSECT
    self.root = DirEntry(self, 0)
    self.root.name = 'Root Entry'
    self.root.sector_id = None
    self.root.type = 'root storage'
    self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
    self.dir_cache[0] = self.root
    self.dir_fat_chain = [0]
def write_header(self):
    """Write the CFB header (magic, class id, versions, byte order,
    sector sizes, FAT/miniFAT/DIFAT bookkeeping and the 109 header
    DIFAT entries) at offset 0, zero-padding the rest of the first
    sector."""
    logging.debug("writiing header")
    f = self.f
    f.seek(0)
    f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
    f.write(self.class_id.bytes_le)
    write_u16le(f, self.minor_version)
    write_u16le(f, self.major_version)
    write_u16le(f, 0xFFFE) #byte order le
    # sector sizes are stored as powers of two
    write_u16le(f, int(math.log(self.sector_size, 2)))
    write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
    f.write(b'\0' * 6) #skip reseverd
    write_u32le(f, self.dir_sector_count)
    write_u32le(f, self.fat_sector_count)
    write_u32le(f, self.dir_sector_start)
    write_u32le(f, self.transaction_signature)
    write_u32le(f, self.min_stream_max_size)
    write_u32le(f, self.minifat_sector_start)
    write_u32le(f, self.minifat_sector_count)
    write_u32le(f, self.difat_sector_start)
    write_u32le(f, self.difat_sector_count)
    # the first 109 DIFAT entries live in the header itself
    for i in range(109):
        write_u32le(f, self.difat[0][i])
    # zero-pad the remainder of the header sector
    for i in range(self.sector_size - f.tell()):
        f.write(b'\0')
def read_header(self):
    """Parse the CFB header at offset 0 and load the DIFAT: the 109
    header entries plus any chained overflow DIFAT sectors (each
    overflow sector's last entry links to the next).

    :raises NotImplementedError: for big-endian files or unsupported
        sector sizes (only 4096/512 regular and 64-byte mini sectors
        are handled).
    """
    f = self.f
    f.seek(0)
    magic = f.read(8)
    logging.debug("magic: %s" % str([magic]))
    self.class_id = auid.AUID(bytes_le=f.read(16))
    logging.debug("clsid: %s" % str(self.class_id))
    self.minor_version = read_u16le(f)
    logging.debug("minor_version: %d" % self.minor_version)
    self.major_version = read_u16le(f)
    logging.debug("major_version: %d" % self.major_version)
    byte_order = read_u16le(f)
    if byte_order == 0xFFFE:
        self.byte_order = 'le'
    else:
        raise NotImplementedError("endian format:0x%X not supported" % byte_order)
    logging.debug("byte_order: %s" % self.byte_order)
    # sector sizes are stored as powers of two
    size = read_u16le(f)
    self.sector_size = pow(2, size)
    logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
    size = read_u16le(f)
    self.mini_stream_sector_size = pow(2, size)
    logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
    if not self.sector_size in (4096, 512):
        raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
    if self.mini_stream_sector_size != 64:
        raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
    f.read(6) #skip reseverd
    self.dir_sector_count = read_u32le(f)
    logging.debug("dir_sector_count: %d" % self.dir_sector_count)
    self.fat_sector_count = read_u32le(f)
    logging.debug("fat_sector_count: %d" % self.fat_sector_count)
    self.dir_sector_start = read_u32le(f)
    logging.debug("dir_sector_start: %d" % self.dir_sector_start)
    self.transaction_signature = read_u32le(f)
    logging.debug("transaction_signature: %d" % self.transaction_signature)
    self.min_stream_max_size = read_u32le(f)
    logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
    self.minifat_sector_start = read_u32le(f)
    logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
    self.minifat_sector_count = read_u32le(f)
    logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
    self.difat_sector_start = read_u32le(f)
    logging.debug("difat_sector_start: %d" % self.difat_sector_start)
    self.difat_sector_count = read_u32le(f)
    logging.debug("difat_sector_count: %d" % self.difat_sector_count)
    self.difat = [[]]
    logging.debug("reading header difat at %d" % f.tell())
    for i in range(109):
        item = read_u32le(f)
        self.difat[0].append(item)
    sectors_left = self.difat_sector_count
    sid = self.difat_sector_start
    # reading difat sectors; a special marker (FREESECT/ENDOFCHAIN/...)
    # terminates the chain early
    while sectors_left:
        logging.debug("reading difat sid: %d", sid)
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            break
        self.difat_chain.append(sid)
        f.seek((sid + 1) * self.sector_size)
        difat = []
        for i in range( (self.sector_size // 4)):
            item = read_u32le(f)
            difat.append(item)
        self.difat.append(difat)
        # last entry of each difat sector is the sid of the next one
        sid = difat[-1]
        logging.debug("next difat: %d" % sid)
        sectors_left -= 1
def iter_difat(self):
    """Yield ``(table_index, entry_index, sector_id)`` for every usable
    DIFAT entry: all 109 header entries first, then each overflow DIFAT
    sector's entries excluding its trailing next-sector link."""
    header, *overflow = self.difat
    for slot, sector in enumerate(header):
        yield 0, slot, sector
    for table_num, table in enumerate(overflow, start=1):
        for slot, sector in enumerate(table[:-1]):
            yield table_num, slot, sector
def write_difat(self):
    """Write the DIFAT: the 109 header entries at offset 76 (zero
    padding the rest of the header sector), then each overflow DIFAT
    table into its own sector, following the trailing next-sector
    links."""
    f = self.f
    # write header entries
    f.seek(76)
    logging.debug("writing header difat")
    for i in range(109):
        write_u32le(f, self.difat[0][i])
    for i in range(self.sector_size - f.tell()):
        f.write(b'\0')
    if self.difat_sector_count == 0:
        return
    sid = self.difat_sector_start
    assert len(self.difat[1:]) == self.difat_sector_count
    for table in self.difat[1:]:
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            raise IOError("bad difat sector type")
        pos = (sid + 1) * self.sector_size
        logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
        f.seek(pos)
        for i in range(self.sector_size // 4):
            write_u32le(f, table[i])
        # last entry of each table is the sid of the next difat sector
        sid = table[-1]
def read_fat(self):
    """Read every FAT sector referenced by the DIFAT into ``self.fat``
    and rebuild the free-sector list.

    Trusts the DIFAT over the header's ``fat_sector_count`` when they
    disagree. Warns (but does not fail) if the 4k range-lock sector
    holds data.
    """
    f = self.f
    self.fat = array(str('I'))
    sector_count = 0
    fat_sectors = []
    for t, i, sid in self.iter_difat():
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            # markers (FREESECT etc.) are not FAT sector ids
            continue
        fat_sectors.append(sid)
    if len(fat_sectors) != self.fat_sector_count:
        # logging.warn is a deprecated alias; use logging.warning
        logging.warning("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
        self.fat_sector_count = len(fat_sectors)
    for sid in fat_sectors:
        pos = (sid + 1) * self.sector_size
        f.seek(pos)
        extend_sid_table(f, self.fat, self.sector_size)
        sector_count += 1
    if sys.byteorder == 'big':
        # entries are little endian on disk
        self.fat.byteswap()
    for i,v in enumerate(self.fat):
        if v == FREESECT:
            self.fat_freelist.append(i)
    logging.debug("read %d fat sectors ", sector_count)
    if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
        if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
            logging.warning("range lock sector has data")
def write_fat(self):
    """Write ``self.fat`` back into the FAT sectors recorded in the DIFAT."""
    logging.debug("writing fat")
    out = self.f
    # the fat must fill whole sectors
    assert len(self.fat) * 4 % self.sector_size == 0

    # collect the real FAT sector ids from the DIFAT, skipping sentinels
    fat_sids = []
    for _table, _index, sid in self.iter_difat():
        if isinstance(fat_sector_types.get(sid, sid), int):
            fat_sids.append(sid)

    # the difat must reference exactly enough sectors to hold the fat
    assert len(fat_sids) == len(self.fat) * 4 // self.sector_size

    entries_per_sector = self.sector_size // 4
    packer = Struct(str('<%dI' % entries_per_sector))
    for table_index, sid in enumerate(fat_sids):
        out.seek((sid + 1) * self.sector_size)  # +1 skips the header sector
        begin = table_index * entries_per_sector
        out.write(packer.pack(*self.fat[begin:begin + entries_per_sector]))
def read_minifat(self):
    """Read the MiniFAT chain into ``self.minifat``.

    :return: the mini stream size in bytes implied by the last used
        MiniFAT entry (callers compare this against the root entry size)
    """
    f = self.f
    sector_count = 0
    self.minifat = array(str('I'))

    for sid in self.get_fat_chain(self.minifat_sector_start):
        self.minifat_chain.append(sid)
        f.seek((sid + 1) * self.sector_size)  # +1 skips the header sector
        extend_sid_table(f, self.minifat, self.sector_size)
        sector_count += 1

    # entries were read as little-endian words; fix up on big-endian hosts
    if sys.byteorder == 'big':
        self.minifat.byteswap()

    # mini_stream_byte_size = 0
    last_used_sector = 0
    for i,v in enumerate(self.minifat):
        if v == FREESECT:
            self.minifat_freelist.append(i)
        else:
            last_used_sector = i
            # mini_stream_byte_size += self.mini_stream_sector_size

    # size runs up to the last used mini sector, not the count of used ones
    mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)

    # for i, sect in enumerate(pretty_sectors(self.minifat)):
    #     print(i, sect)

    logging.debug("read %d mini fat sectors", sector_count)
    return mini_stream_byte_size
def write_minifat(self):
    """Write ``self.minifat`` back into the sectors of the MiniFAT chain."""
    out = self.f
    entries_per_sector = self.sector_size // 4
    packer = Struct(str('<%dI' % entries_per_sector))
    for table_index, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
        out.seek((sid + 1) * self.sector_size)  # +1 skips the header sector
        begin = table_index * entries_per_sector
        chunk = self.minifat[begin:begin + entries_per_sector]
        out.write(packer.pack(*chunk))
def write_modified_dir_entries(self):
    """Flush every dirty DirEntry in ``self.modified`` to disk, then clear it."""
    out = self.f
    for dir_id in sorted(self.modified):
        entry = self.modified[dir_id]
        # locate the 128-byte slot for this entry in the directory chain
        chain_index, sid_offset = divmod(entry.dir_id * 128, self.sector_size)
        sid = self.dir_fat_chain[chain_index]
        out.seek(((sid + 1) * self.sector_size) + sid_offset)
        out.write(entry.data)
        # drop any cached copy of the sector we just rewrote
        if sid in self.sector_cache:
            del self.sector_cache[sid]
    self.modified = {}
def write_dir_entries(self):
    """Flush dirty DirEntries, then blank out every freed directory slot."""
    self.write_modified_dir_entries()

    # overwrite freed slots with zeroed 128-byte records
    blank = bytearray(128)
    out = self.f
    self.dir_freelist.sort()
    for dir_id in self.dir_freelist:
        chain_index, sid_offset = divmod(dir_id * 128, self.sector_size)
        sid = self.dir_fat_chain[chain_index]
        out.seek(((sid + 1) * self.sector_size) + sid_offset)
        out.write(blank)
def next_free_minifat_sect(self):
    """Return a free MiniFAT index, growing the mini stream/MiniFAT as needed."""
    idx_per_sect = self.sector_size // self.mini_stream_sector_size
    # number of mini sectors the backing mini stream can currently hold
    stream_sects = len(self.mini_stream_chain) * idx_per_sect
    if self.minifat_freelist:
        i = self.minifat_freelist.pop(0)
        assert self.minifat[i] == FREESECT
        # index lies past the end of the mini stream: grow it by one sector
        if i+1 > stream_sects:
            self.mini_stream_grow()
        return i

    # if we got here need to add aditional fat
    sid = self.next_free_sect()
    # logging.warn("growing minifat to sid %d" % sid)
    # extend the minifat with one full sector of FREESECT entries
    idx_start = len(self.minifat)
    idx_end = idx_start + self.sector_size // 4
    self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
    self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
    if self.minifat_sector_count == 0:
        # first minifat sector: record the chain start for the header
        self.minifat_sector_count = 1
        self.minifat_sector_start = sid
    else:
        # link the previous minifat tail sector to the new one
        self.minifat_sector_count += 1
        self.fat[self.minifat_chain[-1]] = sid

    self.minifat_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
    # recurse: the freelist is now guaranteed non-empty
    return self.next_free_minifat_sect()
def next_free_sect(self):
    """Return the sector id of a free sector, growing the FAT/DIFAT as needed.

    Pops from ``self.fat_freelist`` when possible; otherwise allocates a
    new FAT sector (and a new DIFAT sector when the DIFAT is full) and
    recurses. For 4096-byte-sector files the range-lock sector is never
    handed out.
    """
    if self.fat_freelist:
        i = self.fat_freelist.pop(0)
        assert self.fat[i] == FREESECT

        # Handle Range Lock Sector: the sector covering file offset
        # 0x7FFFFF00 must never carry data, so mark it used and skip it.
        if i == RANGELOCKSECT and self.sector_size == 4096:
            self.fat[i] = ENDOFCHAIN
            logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
            return self.next_free_sect()
        return i

    # freelist exhausted: grow the fat (and possibly the difat)
    difat_table = None
    difat_index = None
    for t, i, v in self.iter_difat():
        if v == FREESECT:
            difat_table = t
            difat_index = i
            break

    new_difat_sect = None
    if difat_index is None:
        # difat is full too: append a brand new difat sector
        new_difat_sect = len(self.fat) + 1
        logging.debug("adding new difat to sid: %d" % new_difat_sect)
        if self.difat_sector_count == 0:
            self.difat_sector_start = new_difat_sect
            self.difat_sector_count = 1
        else:
            # chain the previous difat sector to the new one
            self.difat[-1][-1] = new_difat_sect
            self.difat_sector_count += 1

        # add difat table
        difat = []
        for i in range(self.sector_size // 4):
            difat.append(FREESECT)
        # BUGFIX: this line was `difat[-1] == ENDOFCHAIN`, a no-op
        # comparison; the DIFAT chain terminator must actually be assigned.
        difat[-1] = ENDOFCHAIN
        self.difat.append(difat)

        # re-scan: the fresh table is guaranteed to have a free slot
        for t, i, v in self.iter_difat():
            if v == FREESECT:
                difat_table = t
                difat_index = i
                break

    new_fat_sect = len(self.fat)
    self.difat[difat_table][difat_index] = new_fat_sect

    # grow fat entries by one sector's worth
    idx_start = len(self.fat)
    idx_end = idx_start + (self.sector_size // 4)
    self.fat.extend([FREESECT for i in range(self.sector_size // 4)])

    non_free_sids = set([new_fat_sect, new_difat_sect])

    # Handle Range Lock Sector
    # The range lock sector is the sector
    # that covers file offsets 0x7FFFFF00-0x7FFFFFFF in the file
    if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
        non_free_sids.add(RANGELOCKSECT)
        logging.debug("adding range lock")
        self.fat[RANGELOCKSECT] = ENDOFCHAIN

    freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
    self.fat_freelist.extend(freelist)

    self.fat[new_fat_sect] = FATSECT
    self.fat_sector_count += 1
    if not new_difat_sect is None:
        self.fat[new_difat_sect] = DIFSECT

    # recurse: the freelist is now guaranteed non-empty
    return self.next_free_sect()
def read_sector_data(self, sid):
    """Return the raw bytes of sector *sid*, reading through the sector cache.

    Missing or truncated sectors come back zero-padded to sector_size;
    callers rely on that behaviour.
    """
    cached = self.sector_cache.get(sid, None)
    if cached is not None:
        return cached

    self.f.seek((sid + 1) * self.sector_size)  # +1 skips the header sector
    buf = bytearray(self.sector_size)
    # readinto may return short; the remainder stays zeroed on purpose
    self.f.readinto(buf)
    self.sector_cache[sid] = buf
    return buf
def get_sid_offset(self, abs_pos):
    """Map an absolute file position to (sector_id, offset_within_sector)."""
    sector_number, offset = divmod(abs_pos, self.sector_size)
    # the first sector_size bytes of the file are the header, so sector 0
    # begins at file offset sector_size; hence the -1
    return sector_number - 1, offset
def dir_entry_sid_offset(self, dir_id):
    """Return (sector_id, offset) of the 128-byte slot holding *dir_id*."""
    # directory entries are fixed 128-byte records in the directory chain
    chain_index, offset = divmod(dir_id * 128, self.sector_size)
    return self.dir_fat_chain[chain_index], offset
def dir_entry_pos(self, dir_id):
    """Return the absolute file position of the DirEntry slot for *dir_id*."""
    sid, offset = self.dir_entry_sid_offset(dir_id)
    return ((sid + 1) * self.sector_size) + offset  # +1 skips the header
def read_dir_entry(self, dir_id, parent = None):
    """Return the DirEntry with id *dir_id*, reading from disk when needed.

    Returns None for a None id. Freshly read entries get *parent*
    attached and are added to the weak dir cache.
    """
    if dir_id is None:
        return None

    cached = self.dir_cache.get(dir_id, None)
    if cached is not None:
        return cached

    # locate the 128-byte record inside the directory sector chain
    chain_index, sid_offset = divmod(dir_id * 128, self.sector_size)
    sid = self.dir_fat_chain[chain_index]
    sector_data = self.read_sector_data(sid)
    record = bytearray(sector_data[sid_offset:sid_offset + 128])

    entry = DirEntry(self, dir_id, data=record)
    entry.parent = parent
    self.dir_cache[dir_id] = entry
    return entry
def clear_sector(self, sid):
    """Overwrite sector *sid* with zero bytes."""
    self.f.seek((sid + 1) * self.sector_size)  # +1 skips the header sector
    self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
    """Return a free directory-entry id, growing the dir chain if needed."""
    # reuse freed ids before allocating new sectors
    if self.dir_freelist:
        return self.dir_freelist.pop(0)

    # no free slots: append a fresh sector to the directory chain and
    # register every id it can hold, then recurse to hand the first out
    new_sid = self.fat_chain_append(self.dir_fat_chain[-1])
    self.dir_fat_chain.append(new_sid)
    self.dir_sector_count += 1

    ids_per_sector = self.sector_size // 128
    first_id = (len(self.dir_fat_chain) - 1) * ids_per_sector
    self.dir_freelist.extend(range(first_id, first_id + ids_per_sector))
    return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
    """Follow a FAT (or MiniFAT) chain from *start_sid* and return its sids.

    :return: list of sector ids; [] for a None or sentinel start value
    :raises CompoundFileBinaryError: if the chain loops back on itself
    """
    fat = self.fat
    fat_name = "FAT"
    if minifat:
        fat = self.minifat
        fat_name = "MINIFAT"

    # Floyd's Tortoise and Hare cycle-finding algorithm:
    # b walks one link per iteration and collects sectors (tortoise),
    # a walks two links per iteration (hare)
    a = start_sid
    b = start_sid

    sectors = []
    if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
        return []

    while b != ENDOFCHAIN:
        sectors.append(b)
        b = fat[b]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a == b:
            # the hare caught the tortoise: the chain is cyclic
            raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))

    return sectors
def mini_stream_grow(self):
    """Append one regular sector to the mini stream's FAT chain."""
    new_sid = self.next_free_sect()
    if not self.mini_stream_chain:
        # first sector: the root entry's sector_id anchors the mini stream
        self.mini_stream_chain = [new_sid]
        self.root.sector_id = new_sid
    else:
        # link the previous tail sector to the new one
        self.fat[self.mini_stream_chain[-1]] = new_sid
        self.mini_stream_chain.append(new_sid)
    self.fat[new_sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
    """Allocate a new sector and append it to the chain starting at *start_sid*.

    With start_sid None a new one-sector chain is begun. Works on the FAT
    or, with minifat=True, the MiniFAT.

    :return: the newly allocated sector id
    """
    if minifat:
        sect = self.next_free_minifat_sect()
        # logging.debug("creating new mini sector: %d" % sect)
        fat = self.minifat
    else:
        sect = self.next_free_sect()
        # logging.debug("creating new sector: %d" % sect)
        fat = self.fat

    if start_sid is None:
        # start a brand new single-sector chain
        fat[sect] = ENDOFCHAIN
    else:
        fat_chain = self.get_fat_chain(start_sid, minifat)
        assert fat_chain
        # link old tail -> new sector, terminate the chain at the new tail
        fat[fat_chain[-1]] = sect
        fat[sect] = ENDOFCHAIN
    return sect
def free_fat_chain(self, start_sid, minifat=False):
    """Mark every sector in the chain starting at *start_sid* as free."""
    table = self.minifat if minifat else self.fat
    freelist = self.minifat_freelist if minifat else self.fat_freelist
    for sid in self.get_fat_chain(start_sid, minifat):
        table[sid] = FREESECT
        # freed sectors go to the front of the list so they are reused first
        freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
    """Create a new DirEntry at *path* and attach it to its parent storage.

    :param path: absolute path of the new entry; its dirname must exist
    :param dir_type: 'storage' or 'stream'
    :param class_id: optional class id for the entry
    :raises ValueError: if *path* already exists, the parent is missing,
        or the parent is not a storage type
    :return: the new DirEntry
    """
    if self.exists(path):
        raise ValueError("%s already exists" % path)

    dirname = os.path.dirname(path)
    basename = os.path.basename(path)

    root = self.find(dirname)
    if root is None:
        raise ValueError("parent dirname does not exist: %s" % dirname)
    if not root.type in ('storage', 'root storage'):
        raise ValueError("can not add entry to non storage type")

    dir_id = self.next_free_dir_id()
    logging.debug("next dir id %d" % dir_id)

    entry = DirEntry(self, dir_id)
    entry.name = basename
    entry.type = dir_type
    entry.class_id = class_id

    # TODO: Implement a Red Black tree
    # all new DirEntries are black, so there is no tree balancing.
    # AAF Low-Level Container Specification says its alright to do this.
    entry.color = 'black'

    root.add_child(entry)
    self.dir_cache[dir_id] = entry
    return entry
def free_dir_entry(self, entry):
    """Release *entry*'s id back to the freelist and purge it from caches."""
    dir_id = entry.dir_id
    # make the id reusable
    self.dir_freelist.append(dir_id)

    # drop every cached/pending reference to the entry
    for cache in (self.dir_cache, self.children_cache, self.modified):
        if dir_id in cache:
            del cache[dir_id]

    # the entry no longer occupies a directory slot
    entry.dir_id = None
def remove(self, path):
    """
    Removes both streams and storage DirEntry types from file.
    storage type entries need to be empty dirs.

    :raises ValueError: if *path* does not exist, is the root entry, or
        is a storage that still has children
    """
    entry = self.find(path)
    if not entry:
        raise ValueError("%s does not exists" % path)

    if entry.type == 'root storage':
        # fixed message typo: was "can no remove root entry"
        raise ValueError("can not remove root entry")

    if entry.type == "storage" and not entry.child_id is None:
        raise ValueError("storage contains children")

    # detach the entry from its parent's sibling tree
    entry.pop()

    # remove stream data; small streams live in the mini stream (minifat)
    if entry.type == "stream":
        self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)

    self.free_dir_entry(entry)
def rmtree(self, path):
    """
    Removes directory structure, similar to shutil.rmtree.
    """
    # walk bottom-up so every storage is empty by the time we reach it
    for node, storages, streams in self.walk(path, topdown=False):
        for stream in streams:
            # small streams live in the mini stream (minifat)
            self.free_fat_chain(stream.sector_id, stream.byte_size < self.min_stream_max_size)
            self.free_dir_entry(stream)
        for storage in storages:
            self.free_dir_entry(storage)
        node.child_id = None

    # finally remove the (now empty) root item itself
    self.remove(path)
def listdir(self, path = None):
    """
    Return a list containing the ``DirEntry`` objects in the directory
    given by path.
    """
    # listdir_dict maps entry name -> DirEntry; only the entries are wanted
    return self.listdir_dict(path).values()
def find(self, path):
    """
    find a ``DirEntry`` located at *path*. Returns ``None`` if path
    does not exist.
    """
    # already an entry: nothing to look up
    if isinstance(path, DirEntry):
        return path
    if path == "/":
        return self.root

    components = path.lstrip('/').split("/")
    node = self.root
    # descend one path component at a time
    for name in components:
        node = self.listdir_dict(node).get(name, None)
        if not node:
            return None
    return node
def walk(self, path = None, topdown=True):
    """
    Similar to :func:`os.walk`, yields a 3-tuple ``(root, storage_items, stream_items)``

    With topdown=True a storage is yielded before its children; with
    topdown=False children are yielded first (useful when deleting).

    :raises ValueError: if *path* is not a storage type
    """
    if path is None:
        path = self.root

    root = self.find(path)
    if not root.isdir():
        raise ValueError("can only walk storage types")

    if not root.child_id:
        # storage with no children: nothing to yield
        return

    if topdown:
        storage_items = []
        stream_items = []
        for item in self.listdir(root):
            if item.isdir():
                storage_items.append(item)
            else:
                stream_items.append(item)

        yield root, storage_items, stream_items

        # recurse into sub-storages after yielding the parent
        for item in storage_items:
            for root, storage_items, stream_items in self.walk(item):
                yield root, storage_items, stream_items
    else:
        # NOTE: despite its name, this helper visits children before the
        # node itself, i.e. a bottom-up traversal
        def topdown_visit_node(root):
            storage_items = []
            stream_items = []
            for item in self.listdir(root):
                if item.isdir():
                    for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
                        yield sub_root, sub_storage, sub_stream
                    storage_items.append(item)
                else:
                    stream_items.append(item)

            yield root, storage_items, stream_items

        for root_item, storage, stream in topdown_visit_node(root):
            yield root_item, storage, stream
def exists(self, path):
    """
    Return ``True`` if path refers to a existing path.
    """
    return self.find(path) is not None
def makedir(self, path, class_id=None):
    """
    Create a storage DirEntry name path
    """
    # a storage entry is the CFB equivalent of a directory
    return self.create_dir_entry(path, dir_type='storage', class_id=class_id)
def makedirs(self, path):
    """
    Recursive storage DirEntry creation function.
    """
    assert path.startswith('/')
    # create each missing ancestor in turn, root-most first
    partial = ""
    for component in path.strip('/').split('/'):
        partial += "/" + component
        if not self.exists(partial):
            self.makedir(partial)
    return self.find(path)
def move(self, src, dst):
    """
    Moves ``DirEntry`` from src to dst

    A trailing '/' on *dst* means "into this directory, keeping the
    source name".

    :raises ValueError: if src is missing, dst already exists, either
        side is the root, or dst's parent is missing or not a storage
    :return: the moved DirEntry
    """
    src_entry = self.find(src)
    if src_entry is None:
        raise ValueError("src path does not exist: %s" % src)

    # dst ending in '/' targets a directory: keep the original name
    if dst.endswith('/'):
        dst += src_entry.name

    if self.exists(dst):
        raise ValueError("dst path already exist: %s" % dst)

    if dst == '/' or src == '/':
        raise ValueError("cannot overwrite root dir")

    split_path = dst.strip('/').split('/')

    dst_basename = split_path[-1]
    dst_dirname = '/' + '/'.join(split_path[:-1])

    dst_entry = self.find(dst_dirname)
    if dst_entry is None:
        # fixed message: previously said "src path does not exist"
        raise ValueError("dst dirname does not exist: %s" % dst_dirname)

    if not dst_entry.isdir():
        raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)

    # detach from the old parent's sibling tree, then re-attach under dst
    src_entry.pop()
    src_entry.parent = None
    src_entry.name = dst_basename

    dst_entry.add_child(src_entry)
    # keep the cached child listing of the destination in sync
    self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
    return src_entry
def open(self, path, mode='r'):
    """Open stream, returning ``Stream`` object

    mode 'r'  - the stream must already exist
    mode 'w'  - create the stream, or truncate an existing one
    mode 'rw' - open an existing stream, or create a new empty one

    :raises ValueError: if mode 'r' and the stream is missing, or *path*
        is not a stream type DirEntry
    """
    entry = self.find(path)
    if entry is None:
        if mode == 'r':
            raise ValueError("stream does not exists: %s" % path)
        # 'w'/'rw': create a fresh, empty stream entry
        entry = self.create_dir_entry(path, 'stream', None)
    else:
        if not entry.isfile():
            raise ValueError("can only open stream type DirEntry's")
        if mode == 'w':
            logging.debug("stream: %s exists, overwriting" % path)
            # release the old data; small streams live in the mini stream
            self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
            entry.sector_id = None
            entry.byte_size = 0
            entry.class_id = None
        elif mode == 'rw':
            pass

    s = Stream(self, entry, mode)
    return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.find
|
python
|
def find(self, path):
if isinstance(path, DirEntry):
return path
if path == "/":
return self.root
split_path = path.lstrip('/').split("/")
i = 0
root = self.root
while True:
children = self.listdir_dict(root)
match = children.get(split_path[i], None)
if match:
if i == len(split_path) - 1:
return match
root = match
i += 1
else:
return None
|
find a ``DirEntry`` located at *path*. Returns ``None`` if path
does not exist.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1711-L1739
|
[
"def listdir_dict(self, path = None):\n \"\"\"\n Return a dict containing the ``DirEntry`` objects in the directory\n given by path with name of the dir as key.\n \"\"\"\n\n if path is None:\n path = self.root\n\n root = self.find(path)\n if root is None:\n raise ValueError(\"unable to find dir: %s\" % str(path))\n\n if not root.isdir():\n raise ValueError(\"can only list storage types\")\n\n children = self.children_cache.get(root.dir_id, None)\n if children is not None:\n return children\n\n child = root.child()\n\n result = {}\n if not child:\n self.children_cache[root.dir_id] = result\n return result\n\n dir_per_sector = self.sector_size // 128\n max_dirs_entries = self.dir_sector_count * dir_per_sector\n\n stack = deque([child])\n count = 0\n\n while stack:\n current = stack.pop()\n result[current.name] = current\n count += 1\n\n if count > max_dirs_entries:\n raise CompoundFileBinaryError(\"corrupt folder structure\")\n\n left = current.left()\n if left:\n stack.append(left)\n right = current.right()\n if right:\n stack.append(right)\n\n self.children_cache[root.dir_id] = result\n return result\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
if sector_size == 4096:
self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
elif sector_size == 512:
self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
else:
raise ValueError("sector size must be 4096 or 512")
self.major_version = 4
self.minor_version = 62
self.byte_order = "le"
self.sector_size = sector_size
self.mini_stream_sector_size = 64
self.dir_sector_count = 1
self.fat_sector_count = 1
self.dir_sector_start = 0
self.transaction_signature = 1
self.min_stream_max_size = 4096
self.minifat_sector_start = FREESECT
self.minifat_sector_count = 0
self.difat_sector_start = FREESECT
self.difat_sector_count = 0
self.difat = [[]]
for i in range(109):
self.difat[0].append(FREESECT)
self.difat[0][0] = 1
for i in range(self.sector_size // 4):
self.fat.append(FREESECT)
if i > 1:
self.fat_freelist.append(i)
self.fat[0] = ENDOFCHAIN # end of dir chain
self.fat[self.difat[0][0]] = FATSECT
self.root = DirEntry(self, 0)
self.root.name = 'Root Entry'
self.root.sector_id = None
self.root.type = 'root storage'
self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
self.dir_cache[0] = self.root
self.dir_fat_chain = [0]
# raise NotImplementedError("mode: %s supported not implemented" % self.f.mode)
def write_header(self):
logging.debug("writiing header")
f = self.f
f.seek(0)
f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
f.write(self.class_id.bytes_le)
write_u16le(f, self.minor_version)
write_u16le(f, self.major_version)
write_u16le(f, 0xFFFE) #byte order le
write_u16le(f, int(math.log(self.sector_size, 2)))
write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
f.write(b'\0' * 6) #skip reseverd
write_u32le(f, self.dir_sector_count)
write_u32le(f, self.fat_sector_count)
write_u32le(f, self.dir_sector_start)
write_u32le(f, self.transaction_signature)
write_u32le(f, self.min_stream_max_size)
write_u32le(f, self.minifat_sector_start)
write_u32le(f, self.minifat_sector_count)
write_u32le(f, self.difat_sector_start)
write_u32le(f, self.difat_sector_count)
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
def read_header(self):
f = self.f
f.seek(0)
magic = f.read(8)
# logging.debug("magic: %s" % magic.encode("hex"))
logging.debug("magic: %s" % str([magic]))
# clsid = f.read(16)
# logging.debug("clsid: %s" % clsid.encode("hex"))
self.class_id = auid.AUID(bytes_le=f.read(16))
logging.debug("clsid: %s" % str(self.class_id))
self.minor_version = read_u16le(f)
logging.debug("minor_version: %d" % self.minor_version)
self.major_version = read_u16le(f)
logging.debug("major_version: %d" % self.major_version)
byte_order = read_u16le(f)
if byte_order == 0xFFFE:
self.byte_order = 'le'
else:
raise NotImplementedError("endian format:0x%X not supported" % byte_order)
logging.debug("byte_order: %s" % self.byte_order)
size = read_u16le(f)
self.sector_size = pow(2, size)
logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
size = read_u16le(f)
self.mini_stream_sector_size = pow(2, size)
logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
if not self.sector_size in (4096, 512):
raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
if self.mini_stream_sector_size != 64:
raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
f.read(6) #skip reseverd
self.dir_sector_count = read_u32le(f)
logging.debug("dir_sector_count: %d" % self.dir_sector_count)
self.fat_sector_count = read_u32le(f)
logging.debug("fat_sector_count: %d" % self.fat_sector_count)
self.dir_sector_start = read_u32le(f)
logging.debug("dir_sector_start: %d" % self.dir_sector_start)
self.transaction_signature = read_u32le(f)
logging.debug("transaction_signature: %d" % self.transaction_signature)
self.min_stream_max_size = read_u32le(f)
logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
self.minifat_sector_start = read_u32le(f)
logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
self.minifat_sector_count = read_u32le(f)
logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
self.difat_sector_start = read_u32le(f)
logging.debug("difat_sector_start: %d" % self.difat_sector_start)
self.difat_sector_count = read_u32le(f)
logging.debug("difat_sector_count: %d" % self.difat_sector_count)
self.difat = [[]]
logging.debug("reading header difat at %d" % f.tell())
for i in range(109):
item = read_u32le(f)
# item = fat_sector_types.get(item, item)
self.difat[0].append(item)
sectors_left = self.difat_sector_count
sid = self.difat_sector_start
# reading difat sectors
while sectors_left:
logging.debug("reading difat sid: %d", sid)
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
break
self.difat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
difat = []
for i in range( (self.sector_size // 4)):
item = read_u32le(f)
difat.append(item)
self.difat.append(difat)
sid = difat[-1]
logging.debug("next difat: %d" % sid)
sectors_left -= 1
def iter_difat(self):
for i, sid in enumerate(self.difat[0]):
yield 0, i, sid
t = 1
for item in self.difat[1:]:
for i, sid in enumerate(item[:-1]):
yield t, i, sid
t+=1
def write_difat(self):
f = self.f
# write header entries
f.seek(76)
logging.debug("writing header difat")
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
if self.difat_sector_count == 0:
return
sid = self.difat_sector_start
assert len(self.difat[1:]) == self.difat_sector_count
for table in self.difat[1:]:
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
raise IOError("bad difat sector type")
pos = (sid + 1) * self.sector_size
logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
f.seek(pos)
for i in range(self.sector_size // 4):
write_u32le(f, table[i])
sid = table[-1]
def read_fat(self):
f = self.f
self.fat = array(str('I'))
sector_count = 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# len(fat_sectors),self.fat_sector_count
# assert len(fat_sectors) == self.fat_sector_count
if len(fat_sectors) != self.fat_sector_count:
logging.warn("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
self.fat_sector_count = len(fat_sectors)
for sid in fat_sectors:
pos = (sid + 1) * self.sector_size
f.seek(pos)
extend_sid_table(f, self.fat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.fat.byteswap()
for i,v in enumerate(self.fat):
if v == FREESECT:
self.fat_freelist.append(i)
logging.debug("read %d fat sectors ", sector_count)
if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
logging.warn("range lock sector has data")
# logging.debug("fat: %s" % str(pretty_sectors(self.fat)))
def write_fat(self):
logging.debug("writing fat")
f = self.f
sector_count = 0
assert len(self.fat)*4 % self.sector_size == 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# check that the difat has enough entries to hold the current fat
assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(fat_sectors):
# logging.debug("writing fat to sid: %d" % sid)
f.seek((sid + 1) * self.sector_size)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
f = self.f
sector_count = 0
self.minifat = array(str('I'))
for sid in self.get_fat_chain(self.minifat_sector_start):
self.minifat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
extend_sid_table(f, self.minifat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.minifat.byteswap()
# mini_stream_byte_size = 0
last_used_sector = 0
for i,v in enumerate(self.minifat):
if v == FREESECT:
self.minifat_freelist.append(i)
else:
last_used_sector = i
# mini_stream_byte_size += self.mini_stream_sector_size
mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
# for i, sect in enumerate(pretty_sectors(self.minifat)):
# print(i, sect)
logging.debug("read %d mini fat sectors", sector_count)
return mini_stream_byte_size
def write_minifat(self):
f = self.f
sector_count = 0
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
pos = (sid + 1) * self.sector_size
f.seek(pos)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.minifat[start:end]))
def write_modified_dir_entries(self):
f = self.f
for dir_id in sorted(self.modified):
entry = self.modified[dir_id]
stream_pos = entry.dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(entry.data)
# invalidate sector
if sid in self.sector_cache:
del self.sector_cache[sid]
self.modified = {}
def write_dir_entries(self):
self.write_modified_dir_entries()
# clear empty DirEntrys
empty_dir = bytearray(128)
f = self.f
self.dir_freelist.sort()
for dir_id in self.dir_freelist:
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(empty_dir)
def next_free_minifat_sect(self):
idx_per_sect = self.sector_size // self.mini_stream_sector_size
stream_sects = len(self.mini_stream_chain) * idx_per_sect
if self.minifat_freelist:
i = self.minifat_freelist.pop(0)
assert self.minifat[i] == FREESECT
if i+1 > stream_sects:
self.mini_stream_grow()
return i
# if we got here need to add aditional fat
sid = self.next_free_sect()
# logging.warn("growing minifat to sid %d" % sid)
idx_start = len(self.minifat)
idx_end = idx_start + self.sector_size // 4
self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
if self.minifat_sector_count == 0:
self.minifat_sector_count = 1
self.minifat_sector_start = sid
else:
self.minifat_sector_count += 1
self.fat[self.minifat_chain[-1]] = sid
self.minifat_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
return self.next_free_minifat_sect()
def next_free_sect(self):
if self.fat_freelist:
# print("using fat free list")
i = self.fat_freelist.pop(0)
assert self.fat[i] == FREESECT
# Handle Range Lock Sector
if i == RANGELOCKSECT and self.sector_size == 4096:
self.fat[i] = ENDOFCHAIN
logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
return self.next_free_sect()
return i
# if we got here need to add aditional fat
# logging.debug("fat full, growing")
difat_table = None
difat_index = None
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_difat_sect = None
if difat_index is None:
new_difat_sect = len(self.fat) + 1
logging.debug("adding new difat to sid: %d" % new_difat_sect)
if self.difat_sector_count == 0:
self.difat_sector_start = new_difat_sect
self.difat_sector_count = 1
else:
self.difat[-1][-1] = new_difat_sect
self.difat_sector_count += 1
# add difat table
difat = []
for i in range(self.sector_size // 4):
difat.append(FREESECT)
difat[-1] == ENDOFCHAIN
self.difat.append(difat)
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_fat_sect = len(self.fat)
# logging.debug("adding new fat to sid: %d" % new_fat_sect)
self.difat[difat_table][difat_index] = new_fat_sect
# grow fat entries
idx_start = len(self.fat)
idx_end = idx_start + (self.sector_size // 4)
self.fat.extend([FREESECT for i in range(self.sector_size // 4)])
non_free_sids = set([new_fat_sect, new_difat_sect])
# Handle Range Lock Sector
# The range lock sector is the sector
# that covers file offsets 0x7FFFFF00-0x7FFFFFFF in the file
if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
non_free_sids.add(RANGELOCKSECT)
logging.debug("adding range lock")
self.fat[RANGELOCKSECT] = ENDOFCHAIN
freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
self.fat_freelist.extend(freelist)
self.fat[new_fat_sect] = FATSECT
self.fat_sector_count += 1
if not new_difat_sect is None:
self.fat[new_difat_sect] = DIFSECT
return self.next_free_sect()
def read_sector_data(self, sid):
    """Return the raw bytes of sector *sid* as a ``bytearray``, via the sector cache.

    A sector past the end of the file (or a short read) comes back
    zero-padded to the full sector size.
    """
    cached = self.sector_cache.get(sid, None)
    if cached is not None:
        return cached
    # sector 0 lives one sector past the header, hence the +1
    self.f.seek((sid + 1) * self.sector_size)
    buf = bytearray(self.sector_size)
    # readinto leaves the zero padding untouched on a short/empty read
    self.f.readinto(buf)
    self.sector_cache[sid] = buf
    return buf
def get_sid_offset(self, abs_pos):
    """Translate an absolute file position into a (sector id, offset) pair."""
    sector = abs_pos // self.sector_size
    offset = abs_pos % self.sector_size
    # the header occupies the first sector-sized slot, so sids are shifted by one
    return sector - 1, offset
def dir_entry_sid_offset(self, dir_id):
    """Return the (sector id, byte offset) of directory entry *dir_id*."""
    # every directory entry occupies exactly 128 bytes of the directory stream
    chain_index, offset = divmod(dir_id * 128, self.sector_size)
    return self.dir_fat_chain[chain_index], offset
def dir_entry_pos(self, dir_id):
    """Return the absolute file position of directory entry *dir_id*."""
    sid, offset = self.dir_entry_sid_offset(dir_id)
    # +1 accounts for the header sector at the start of the file
    return (sid + 1) * self.sector_size + offset
def read_dir_entry(self, dir_id, parent = None):
    """Return the ``DirEntry`` with *dir_id*, reading it from disk on a cache miss.

    Returns ``None`` when *dir_id* is ``None`` (no entry). Freshly read
    entries are placed in ``self.dir_cache`` (a WeakValueDictionary, so
    cached entries live only while callers hold references to them).
    """
    if dir_id is None:
        return None
    entry = self.dir_cache.get(dir_id, None)
    if entry is not None:
        return entry
    # assert not dir_id in self.dir_freelist
    # Each directory entry is 128 bytes; locate its sector and offset
    # within the directory stream via the directory FAT chain.
    stream_pos = dir_id * 128
    chain_index = stream_pos // self.sector_size
    sid_offset = stream_pos % self.sector_size
    sid = self.dir_fat_chain[chain_index]
    sector_data = self.read_sector_data(sid)
    # copy the 128-byte slice so the entry owns its own buffer
    data= bytearray(sector_data[sid_offset:sid_offset+128])
    entry = DirEntry(self, dir_id, data=data)
    entry.parent = parent
    self.dir_cache[dir_id] = entry
    return entry
def clear_sector(self, sid):
    """Overwrite sector *sid* on disk with zero bytes."""
    # +1 skips the header sector
    self.f.seek((sid + 1) * self.sector_size)
    self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
    """Return the next unused directory-entry id, growing the directory stream if needed."""
    # use free list first
    if self.dir_freelist:
        return self.dir_freelist.pop(0)
    f = self.f
    # no free slots: append one sector to the directory FAT chain and
    # register all of its 128-byte entry slots as free
    sect = self.fat_chain_append(self.dir_fat_chain[-1])
    self.dir_fat_chain.append(sect)
    self.dir_sector_count += 1
    first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
    last_dir_id = first_dir_id + (self.sector_size // 128)
    self.dir_freelist.extend(range(first_dir_id, last_dir_id))
    # recurse: the freelist is now guaranteed non-empty
    return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
    """Follow a (mini)FAT chain from *start_sid* and return the list of sector ids.

    Returns ``[]`` for ``None`` or any of the special sentinel start values.
    Raises ``CompoundFileBinaryError`` if the chain loops back on itself.
    """
    fat = self.fat
    fat_name = "FAT"
    if minifat:
        fat = self.minifat
        fat_name = "MINIFAT"
    # Floyd's Tortoise and Hare cycle-finding algorithm:
    # `b` (tortoise) advances one link per iteration and collects the
    # chain; `a` (hare) advances two links.  If the hare lands on the
    # tortoise while still on a real sector, the chain is cyclic.
    a = start_sid
    b = start_sid
    sectors = []
    if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
        return []
    while b != ENDOFCHAIN:
        sectors.append(b)
        b = fat[b]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a != ENDOFCHAIN:
            a = fat[a]
            # equality is only checked while the hare is on a real sector,
            # so both reaching ENDOFCHAIN never false-positives as a cycle
            if a == b:
                raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
    return sectors
def mini_stream_grow(self):
    """Append one regular sector to the mini stream's FAT chain."""
    sect = self.next_free_sect()
    if self.mini_stream_chain:
        # link the current tail to the new sector
        self.fat[self.mini_stream_chain[-1]] = sect
        self.mini_stream_chain.append(sect)
    else:
        # first sector of the mini stream: record it on the root entry
        self.mini_stream_chain = [sect]
        self.root.sector_id = sect
    self.fat[sect] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
    """Allocate a new sector and append it to the (mini)FAT chain starting at *start_sid*.

    If *start_sid* is ``None`` the new sector starts a fresh chain.
    Returns the newly allocated sector id.
    """
    if minifat:
        sect = self.next_free_minifat_sect()
        # logging.debug("creating new mini sector: %d" % sect)
        fat = self.minifat
    else:
        sect = self.next_free_sect()
        # logging.debug("creating new sector: %d" % sect)
        fat = self.fat
    if start_sid is None:
        fat[sect] = ENDOFCHAIN
    else:
        fat_chain = self.get_fat_chain(start_sid, minifat)
        assert fat_chain
        # link the old tail to the new sector and terminate the chain
        fat[fat_chain[-1]] = sect
        fat[sect] = ENDOFCHAIN
    return sect
def free_fat_chain(self, start_sid, minifat=False):
    """Mark every sector in the chain starting at *start_sid* as free.

    Freed sector ids are pushed onto the front of the matching free list
    so they are reused first.
    """
    table = self.minifat if minifat else self.fat
    freelist = self.minifat_freelist if minifat else self.fat_freelist
    for sect in self.get_fat_chain(start_sid, minifat):
        table[sect] = FREESECT
        freelist.insert(0, sect)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
    """Create a new ``DirEntry`` of *dir_type* at *path* and return it.

    :param path: absolute path of the new entry; its parent must exist
        and be a 'storage' (or 'root storage') entry
    :param dir_type: 'storage' or 'stream'
    :param class_id: optional class id for the new entry
    :raises ValueError: if *path* already exists, or the parent is
        missing or not a storage type
    """
    if self.exists(path):
        raise ValueError("%s already exists" % path)
    dirname = os.path.dirname(path)
    basename = os.path.basename(path)
    root = self.find(dirname)
    if root is None:
        raise ValueError("parent dirname does not exist: %s" % dirname)
    # idiomatic membership test (was: `not root.type in (...)`)
    if root.type not in ('storage', 'root storage'):
        raise ValueError("can not add entry to non storage type")
    dir_id = self.next_free_dir_id()
    logging.debug("next dir id %d" % dir_id)
    entry = DirEntry(self, dir_id)
    entry.name = basename
    entry.type = dir_type
    entry.class_id = class_id
    # TODO: Implement a Red Black tree
    # all new DirEntries are black, so there is no tree balancing.
    # AAF Low-Level Container Specification says its alright to do this.
    entry.color = 'black'
    root.add_child(entry)
    self.dir_cache[dir_id] = entry
    return entry
def free_dir_entry(self, entry):
    """Release *entry*'s directory id back to the free list and drop it from all caches."""
    dir_id = entry.dir_id
    self.dir_freelist.append(dir_id)
    # purge every cache that may still reference this id
    for cache in (self.dir_cache, self.children_cache, self.modified):
        if dir_id in cache:
            del cache[dir_id]
    entry.dir_id = None
def remove(self, path):
    """
    Removes both streams and storage DirEntry types from file.
    storage type entries need to be empty dirs.

    :raises ValueError: if *path* does not exist, is the root entry,
        or is a storage entry that still has children
    """
    entry = self.find(path)
    if not entry:
        raise ValueError("%s does not exists" % path)
    if entry.type == 'root storage':
        # fixed message typo (was "can no remove root entry")
        raise ValueError("cannot remove root entry")
    if entry.type == "storage" and entry.child_id is not None:
        raise ValueError("storage contains children")
    # unlink from the parent's sibling tree
    entry.pop()
    # remove stream data
    if entry.type == "stream":
        # streams smaller than min_stream_max_size live in the minifat
        self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
    self.free_dir_entry(entry)
def rmtree(self, path):
    """
    Removes directory structure, similar to shutil.rmtree.
    """
    # bottom-up walk so children are freed before their parent storage
    for root, storage, streams in self.walk(path, topdown=False):
        for item in streams:
            # free the stream's data chain (minifat when below the cutoff size)
            self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
            self.free_dir_entry(item)
        for item in storage:
            self.free_dir_entry(item)
        root.child_id = None
    # remove root item
    self.remove(path)
def listdir(self, path = None):
    """
    Return a list containing the ``DirEntry`` objects in the directory
    given by path.
    """
    # materialize as a list so the result matches the documented return
    # type (a dict view is not a list and does not support indexing)
    return list(self.listdir_dict(path).values())
def listdir_dict(self, path = None):
    """
    Return a dict containing the ``DirEntry`` objects in the directory
    given by path with name of the dir as key.

    Results are memoized per directory id in ``self.children_cache``.

    :raises ValueError: if *path* cannot be found or is not a storage type
    :raises CompoundFileBinaryError: if the sibling tree is corrupt (cyclic)
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if root is None:
        raise ValueError("unable to find dir: %s" % str(path))
    if not root.isdir():
        raise ValueError("can only list storage types")
    children = self.children_cache.get(root.dir_id, None)
    if children is not None:
        return children
    child = root.child()
    result = {}
    if not child:
        # empty storage: cache and return the empty mapping
        self.children_cache[root.dir_id] = result
        return result
    # The children of a storage form a binary tree rooted at `child`;
    # traverse it iteratively, collecting every node by name.
    dir_per_sector = self.sector_size // 128
    max_dirs_entries = self.dir_sector_count * dir_per_sector
    stack = deque([child])
    count = 0
    while stack:
        current = stack.pop()
        result[current.name] = current
        count += 1
        # visiting more nodes than the directory stream can hold
        # implies a cycle in the sibling tree
        if count > max_dirs_entries:
            raise CompoundFileBinaryError("corrupt folder structure")
        left = current.left()
        if left:
            stack.append(left)
        right = current.right()
        if right:
            stack.append(right)
    self.children_cache[root.dir_id] = result
    return result
def walk(self, path = None, topdown=True):
    """
    Similar to :func:`os.walk`, yields a 3-tuple
    ``(root, storage_items, stream_items)`` for *path* and every storage
    entry below it.

    :param path: storage ``DirEntry`` or path string (defaults to the root)
    :param topdown: when ``True`` a directory is yielded before its
        children, otherwise after them
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if not root.isdir():
        raise ValueError("can only walk storage types")
    if not root.child_id:
        # empty storage: nothing to yield
        return
    if topdown:
        storage_items = []
        stream_items = []
        for item in self.listdir(root):
            if item.isdir():
                storage_items.append(item)
            else:
                stream_items.append(item)
        yield root, storage_items, stream_items
        # pre-order: recurse after yielding the parent
        for item in storage_items:
            for sub in self.walk(item):
                yield sub
    else:
        def bottom_up(node):
            # post-order traversal: children first, then the node itself
            storage_items = []
            stream_items = []
            for item in self.listdir(node):
                if item.isdir():
                    for sub in bottom_up(item):
                        yield sub
                    storage_items.append(item)
                else:
                    stream_items.append(item)
            yield node, storage_items, stream_items
        for sub in bottom_up(root):
            yield sub
def exists(self, path):
    """Return ``True`` if *path* refers to an existing entry."""
    return self.find(path) is not None
def makedir(self, path, class_id=None):
    """Create and return a new storage ``DirEntry`` at *path*."""
    entry = self.create_dir_entry(path, dir_type='storage', class_id=class_id)
    return entry
def makedirs(self, path):
    """
    Recursive storage DirEntry creation function.

    *path* must be absolute (start with '/'); every missing intermediate
    storage entry is created. Returns the ``DirEntry`` for *path*.

    :raises ValueError: if *path* is not absolute
    """
    # raise instead of assert: asserts are stripped under `python -O`
    if not path.startswith('/'):
        raise ValueError("path must be absolute: %s" % path)
    root = ""
    for item in path.strip('/').split('/'):
        root += "/" + item
        if not self.exists(root):
            self.makedir(root)
    return self.find(path)
def move(self, src, dst):
    """
    Moves ``DirEntry`` from src to dst.

    If *dst* ends with ``'/'`` the source entry keeps its own name
    inside that directory. Returns the moved ``DirEntry``.

    :raises ValueError: if *src* is missing, *dst* already exists,
        either path is the root, or the destination parent is missing
        or is a stream
    """
    src_entry = self.find(src)
    if src_entry is None:
        raise ValueError("src path does not exist: %s" % src)
    if dst.endswith('/'):
        dst += src_entry.name
    if self.exists(dst):
        raise ValueError("dst path already exist: %s" % dst)
    if dst == '/' or src == '/':
        raise ValueError("cannot overwrite root dir")
    # split dst into parent dirname and new basename
    split_path = dst.strip('/').split('/')
    dst_basename = split_path[-1]
    dst_dirname = '/' + '/'.join(split_path[:-1])
    # print(dst)
    # print(dst_basename, dst_dirname)
    dst_entry = self.find(dst_dirname)
    if dst_entry is None:
        raise ValueError("src path does not exist: %s" % dst_dirname)
    if not dst_entry.isdir():
        raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
    # src_entry.parent.remove_child(src_entry)
    # detach from the old sibling tree, rename, then re-attach under dst
    src_entry.pop()
    src_entry.parent = None
    src_entry.name = dst_basename
    dst_entry.add_child(src_entry)
    # NOTE(review): assumes dst's children are already in children_cache
    # (self.find above populates it via listdir_dict); a cache eviction
    # here would raise KeyError -- confirm
    self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
    return src_entry
def open(self, path, mode='r'):
    """Open stream, returning ``Stream`` object.

    mode 'r'  -- stream must already exist
    mode 'w'  -- create the stream, or truncate an existing stream's data
    mode 'rw' -- open existing (or newly created) stream for read/write
    """
    entry = self.find(path)
    if entry is None:
        if mode == 'r':
            raise ValueError("stream does not exists: %s" % path)
        # any writable mode creates the entry on demand
        entry = self.create_dir_entry(path, 'stream', None)
    else:
        if not entry.isfile():
            raise ValueError("can only open stream type DirEntry's")
        if mode == 'w':
            logging.debug("stream: %s exists, overwriting" % path)
            # release the old data chain (minifat when below the cutoff
            # size) and reset the entry's metadata
            self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
            entry.sector_id = None
            entry.byte_size = 0
            entry.class_id = None
        elif mode == 'rw':
            pass
    s = Stream(self, entry, mode)
    return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.walk
|
python
|
def walk(self, path = None, topdown=True):
if path is None:
path = self.root
root = self.find(path)
if not root.isdir():
raise ValueError("can only walk storage types")
if not root.child_id:
return
if topdown:
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for item in storage_items:
for root, storage_items, stream_items in self.walk(item):
yield root, storage_items, stream_items
else:
def topdown_visit_node(root):
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
yield sub_root, sub_storage, sub_stream
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for root_item, storage, stream in topdown_visit_node(root):
yield root_item, storage, stream
|
Similar to :func:`os.walk`, yields a 3-tuple ``(root, storage_items, stream_items)``
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1741-L1789
|
[
"def find(self, path):\n \"\"\"\n find a ``DirEntry`` located at *path*. Returns ``None`` if path\n does not exist.\n \"\"\"\n\n if isinstance(path, DirEntry):\n return path\n\n if path == \"/\":\n return self.root\n\n split_path = path.lstrip('/').split(\"/\")\n\n i = 0\n root = self.root\n\n while True:\n\n children = self.listdir_dict(root)\n match = children.get(split_path[i], None)\n\n if match:\n if i == len(split_path) - 1:\n return match\n root = match\n i += 1\n else:\n return None\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
if sector_size == 4096:
self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
elif sector_size == 512:
self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
else:
raise ValueError("sector size must be 4096 or 512")
self.major_version = 4
self.minor_version = 62
self.byte_order = "le"
self.sector_size = sector_size
self.mini_stream_sector_size = 64
self.dir_sector_count = 1
self.fat_sector_count = 1
self.dir_sector_start = 0
self.transaction_signature = 1
self.min_stream_max_size = 4096
self.minifat_sector_start = FREESECT
self.minifat_sector_count = 0
self.difat_sector_start = FREESECT
self.difat_sector_count = 0
self.difat = [[]]
for i in range(109):
self.difat[0].append(FREESECT)
self.difat[0][0] = 1
for i in range(self.sector_size // 4):
self.fat.append(FREESECT)
if i > 1:
self.fat_freelist.append(i)
self.fat[0] = ENDOFCHAIN # end of dir chain
self.fat[self.difat[0][0]] = FATSECT
self.root = DirEntry(self, 0)
self.root.name = 'Root Entry'
self.root.sector_id = None
self.root.type = 'root storage'
self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
self.dir_cache[0] = self.root
self.dir_fat_chain = [0]
# raise NotImplementedError("mode: %s supported not implemented" % self.f.mode)
def write_header(self):
logging.debug("writiing header")
f = self.f
f.seek(0)
f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
f.write(self.class_id.bytes_le)
write_u16le(f, self.minor_version)
write_u16le(f, self.major_version)
write_u16le(f, 0xFFFE) #byte order le
write_u16le(f, int(math.log(self.sector_size, 2)))
write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
f.write(b'\0' * 6) #skip reseverd
write_u32le(f, self.dir_sector_count)
write_u32le(f, self.fat_sector_count)
write_u32le(f, self.dir_sector_start)
write_u32le(f, self.transaction_signature)
write_u32le(f, self.min_stream_max_size)
write_u32le(f, self.minifat_sector_start)
write_u32le(f, self.minifat_sector_count)
write_u32le(f, self.difat_sector_start)
write_u32le(f, self.difat_sector_count)
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
def read_header(self):
f = self.f
f.seek(0)
magic = f.read(8)
# logging.debug("magic: %s" % magic.encode("hex"))
logging.debug("magic: %s" % str([magic]))
# clsid = f.read(16)
# logging.debug("clsid: %s" % clsid.encode("hex"))
self.class_id = auid.AUID(bytes_le=f.read(16))
logging.debug("clsid: %s" % str(self.class_id))
self.minor_version = read_u16le(f)
logging.debug("minor_version: %d" % self.minor_version)
self.major_version = read_u16le(f)
logging.debug("major_version: %d" % self.major_version)
byte_order = read_u16le(f)
if byte_order == 0xFFFE:
self.byte_order = 'le'
else:
raise NotImplementedError("endian format:0x%X not supported" % byte_order)
logging.debug("byte_order: %s" % self.byte_order)
size = read_u16le(f)
self.sector_size = pow(2, size)
logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
size = read_u16le(f)
self.mini_stream_sector_size = pow(2, size)
logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
if not self.sector_size in (4096, 512):
raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
if self.mini_stream_sector_size != 64:
raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
f.read(6) #skip reseverd
self.dir_sector_count = read_u32le(f)
logging.debug("dir_sector_count: %d" % self.dir_sector_count)
self.fat_sector_count = read_u32le(f)
logging.debug("fat_sector_count: %d" % self.fat_sector_count)
self.dir_sector_start = read_u32le(f)
logging.debug("dir_sector_start: %d" % self.dir_sector_start)
self.transaction_signature = read_u32le(f)
logging.debug("transaction_signature: %d" % self.transaction_signature)
self.min_stream_max_size = read_u32le(f)
logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
self.minifat_sector_start = read_u32le(f)
logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
self.minifat_sector_count = read_u32le(f)
logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
self.difat_sector_start = read_u32le(f)
logging.debug("difat_sector_start: %d" % self.difat_sector_start)
self.difat_sector_count = read_u32le(f)
logging.debug("difat_sector_count: %d" % self.difat_sector_count)
self.difat = [[]]
logging.debug("reading header difat at %d" % f.tell())
for i in range(109):
item = read_u32le(f)
# item = fat_sector_types.get(item, item)
self.difat[0].append(item)
sectors_left = self.difat_sector_count
sid = self.difat_sector_start
# reading difat sectors
while sectors_left:
logging.debug("reading difat sid: %d", sid)
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
break
self.difat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
difat = []
for i in range( (self.sector_size // 4)):
item = read_u32le(f)
difat.append(item)
self.difat.append(difat)
sid = difat[-1]
logging.debug("next difat: %d" % sid)
sectors_left -= 1
def iter_difat(self):
for i, sid in enumerate(self.difat[0]):
yield 0, i, sid
t = 1
for item in self.difat[1:]:
for i, sid in enumerate(item[:-1]):
yield t, i, sid
t+=1
def write_difat(self):
f = self.f
# write header entries
f.seek(76)
logging.debug("writing header difat")
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
if self.difat_sector_count == 0:
return
sid = self.difat_sector_start
assert len(self.difat[1:]) == self.difat_sector_count
for table in self.difat[1:]:
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
raise IOError("bad difat sector type")
pos = (sid + 1) * self.sector_size
logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
f.seek(pos)
for i in range(self.sector_size // 4):
write_u32le(f, table[i])
sid = table[-1]
def read_fat(self):
f = self.f
self.fat = array(str('I'))
sector_count = 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# len(fat_sectors),self.fat_sector_count
# assert len(fat_sectors) == self.fat_sector_count
if len(fat_sectors) != self.fat_sector_count:
logging.warn("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
self.fat_sector_count = len(fat_sectors)
for sid in fat_sectors:
pos = (sid + 1) * self.sector_size
f.seek(pos)
extend_sid_table(f, self.fat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.fat.byteswap()
for i,v in enumerate(self.fat):
if v == FREESECT:
self.fat_freelist.append(i)
logging.debug("read %d fat sectors ", sector_count)
if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
logging.warn("range lock sector has data")
# logging.debug("fat: %s" % str(pretty_sectors(self.fat)))
def write_fat(self):
logging.debug("writing fat")
f = self.f
sector_count = 0
assert len(self.fat)*4 % self.sector_size == 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# check that the difat has enough entries to hold the current fat
assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(fat_sectors):
# logging.debug("writing fat to sid: %d" % sid)
f.seek((sid + 1) * self.sector_size)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
f = self.f
sector_count = 0
self.minifat = array(str('I'))
for sid in self.get_fat_chain(self.minifat_sector_start):
self.minifat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
extend_sid_table(f, self.minifat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.minifat.byteswap()
# mini_stream_byte_size = 0
last_used_sector = 0
for i,v in enumerate(self.minifat):
if v == FREESECT:
self.minifat_freelist.append(i)
else:
last_used_sector = i
# mini_stream_byte_size += self.mini_stream_sector_size
mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
# for i, sect in enumerate(pretty_sectors(self.minifat)):
# print(i, sect)
logging.debug("read %d mini fat sectors", sector_count)
return mini_stream_byte_size
def write_minifat(self):
f = self.f
sector_count = 0
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
pos = (sid + 1) * self.sector_size
f.seek(pos)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.minifat[start:end]))
def write_modified_dir_entries(self):
f = self.f
for dir_id in sorted(self.modified):
entry = self.modified[dir_id]
stream_pos = entry.dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(entry.data)
# invalidate sector
if sid in self.sector_cache:
del self.sector_cache[sid]
self.modified = {}
def write_dir_entries(self):
self.write_modified_dir_entries()
# clear empty DirEntrys
empty_dir = bytearray(128)
f = self.f
self.dir_freelist.sort()
for dir_id in self.dir_freelist:
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(empty_dir)
def next_free_minifat_sect(self):
idx_per_sect = self.sector_size // self.mini_stream_sector_size
stream_sects = len(self.mini_stream_chain) * idx_per_sect
if self.minifat_freelist:
i = self.minifat_freelist.pop(0)
assert self.minifat[i] == FREESECT
if i+1 > stream_sects:
self.mini_stream_grow()
return i
# if we got here need to add aditional fat
sid = self.next_free_sect()
# logging.warn("growing minifat to sid %d" % sid)
idx_start = len(self.minifat)
idx_end = idx_start + self.sector_size // 4
self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
if self.minifat_sector_count == 0:
self.minifat_sector_count = 1
self.minifat_sector_start = sid
else:
self.minifat_sector_count += 1
self.fat[self.minifat_chain[-1]] = sid
self.minifat_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
return self.next_free_minifat_sect()
def next_free_sect(self):
if self.fat_freelist:
# print("using fat free list")
i = self.fat_freelist.pop(0)
assert self.fat[i] == FREESECT
# Handle Range Lock Sector
if i == RANGELOCKSECT and self.sector_size == 4096:
self.fat[i] = ENDOFCHAIN
logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
return self.next_free_sect()
return i
# if we got here need to add aditional fat
# logging.debug("fat full, growing")
difat_table = None
difat_index = None
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_difat_sect = None
if difat_index is None:
new_difat_sect = len(self.fat) + 1
logging.debug("adding new difat to sid: %d" % new_difat_sect)
if self.difat_sector_count == 0:
self.difat_sector_start = new_difat_sect
self.difat_sector_count = 1
else:
self.difat[-1][-1] = new_difat_sect
self.difat_sector_count += 1
# add difat table
difat = []
for i in range(self.sector_size // 4):
difat.append(FREESECT)
difat[-1] == ENDOFCHAIN
self.difat.append(difat)
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_fat_sect = len(self.fat)
# logging.debug("adding new fat to sid: %d" % new_fat_sect)
self.difat[difat_table][difat_index] = new_fat_sect
# grow fat entries
idx_start = len(self.fat)
idx_end = idx_start + (self.sector_size // 4)
self.fat.extend([FREESECT for i in range(self.sector_size // 4)])
non_free_sids = set([new_fat_sect, new_difat_sect])
# Handle Range Lock Sector
# The range lock sector is the sector
# that covers file offsets 0x7FFFFF00-0x7FFFFFFF in the file
if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
non_free_sids.add(RANGELOCKSECT)
logging.debug("adding range lock")
self.fat[RANGELOCKSECT] = ENDOFCHAIN
freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
self.fat_freelist.extend(freelist)
self.fat[new_fat_sect] = FATSECT
self.fat_sector_count += 1
if not new_difat_sect is None:
self.fat[new_difat_sect] = DIFSECT
return self.next_free_sect()
def read_sector_data(self, sid):
sector_data = self.sector_cache.get(sid, None)
if sector_data is not None:
return sector_data
else:
pos = (sid + 1) * self.sector_size
self.f.seek(pos)
sector_data = bytearray(self.sector_size)
#NOTE: if requested sector doesn't exsit or
# is truncated will padd with zeros, expected behavour
bytes_read = self.f.readinto(sector_data)
self.sector_cache[sid] = sector_data
return sector_data
def get_sid_offset(self, abs_pos):
sid, sid_offset = divmod(abs_pos, self.sector_size)
return sid-1, sid_offset
def dir_entry_sid_offset(self, dir_id):
stream_pos = dir_id * 128
chain_index, sid_offset = divmod(stream_pos, self.sector_size)
sid = self.dir_fat_chain[chain_index]
return sid, sid_offset
def dir_entry_pos(self, dir_id):
sid, sid_offset = self.dir_entry_sid_offset(dir_id)
pos = ((sid + 1) * self.sector_size) + sid_offset
return pos
def read_dir_entry(self, dir_id, parent = None):
if dir_id is None:
return None
entry = self.dir_cache.get(dir_id, None)
if entry is not None:
return entry
# assert not dir_id in self.dir_freelist
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
sector_data = self.read_sector_data(sid)
data= bytearray(sector_data[sid_offset:sid_offset+128])
entry = DirEntry(self, dir_id, data=data)
entry.parent = parent
self.dir_cache[dir_id] = entry
return entry
def clear_sector(self, sid):
sector_pos = (sid + 1) * self.sector_size
self.f.seek(sector_pos)
# for i in range(self.sector_size):
self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
# use free list first
if self.dir_freelist:
return self.dir_freelist.pop(0)
f = self.f
sect = self.fat_chain_append(self.dir_fat_chain[-1])
self.dir_fat_chain.append(sect)
self.dir_sector_count += 1
first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
last_dir_id = first_dir_id + (self.sector_size // 128)
self.dir_freelist.extend(range(first_dir_id, last_dir_id))
return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
fat = self.fat
fat_name = "FAT"
if minifat:
fat = self.minifat
fat_name = "MINIFAT"
# Floyd's Tortoise and Hare cycle-finding algorithm
a = start_sid
b = start_sid
sectors = []
if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
return []
while b != ENDOFCHAIN:
sectors.append(b)
b = fat[b]
if a != ENDOFCHAIN:
a = fat[a]
if a != ENDOFCHAIN:
a = fat[a]
if a == b:
raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
return sectors
def mini_stream_grow(self):
sid = self.next_free_sect()
# logging.debug("adding to mini stream fat sid: %d" % sid)
if not self.mini_stream_chain:
self.mini_stream_chain = [sid]
self.root.sector_id = sid
else:
self.fat[self.mini_stream_chain[-1]] = sid
self.mini_stream_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
if minifat:
sect = self.next_free_minifat_sect()
# logging.debug("creating new mini sector: %d" % sect)
fat = self.minifat
else:
sect = self.next_free_sect()
# logging.debug("creating new sector: %d" % sect)
fat = self.fat
if start_sid is None:
fat[sect] = ENDOFCHAIN
else:
fat_chain = self.get_fat_chain(start_sid, minifat)
assert fat_chain
fat[fat_chain[-1]] = sect
fat[sect] = ENDOFCHAIN
return sect
def free_fat_chain(self, start_sid, minifat=False):
fat =self.fat
if minifat:
fat = self.minifat
for sid in self.get_fat_chain(start_sid, minifat):
fat[sid] = FREESECT
if minifat:
self.minifat_freelist.insert(0, sid)
else:
self.fat_freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
if self.exists(path):
raise ValueError("%s already exists" % path)
dirname = os.path.dirname(path)
basename = os.path.basename(path)
root = self.find(dirname)
if root is None:
raise ValueError("parent dirname does not exist: %s" % dirname)
if not root.type in ('storage', 'root storage'):
raise ValueError("can not add entry to non storage type")
dir_id = self.next_free_dir_id()
logging.debug("next dir id %d" % dir_id)
entry = DirEntry(self, dir_id)
entry.name = basename
entry.type = dir_type
entry.class_id = class_id
# TODO: Implement a Red Black tree
# all new DirEntries are black, so there is no tree balancing.
# AAF Low-Level Container Specification says its alright to do this.
entry.color = 'black'
root.add_child(entry)
self.dir_cache[dir_id] = entry
return entry
def free_dir_entry(self, entry):
# add freelist
self.dir_freelist.append(entry.dir_id)
# remove from dir caches
if entry.dir_id in self.dir_cache:
del self.dir_cache[entry.dir_id]
if entry.dir_id in self.children_cache:
del self.children_cache[entry.dir_id]
if entry.dir_id in self.modified:
del self.modified[entry.dir_id]
entry.dir_id = None
def remove(self, path):
"""
Removes both streams and storage DirEntry types from file.
storage type entries need to be empty dirs.
"""
entry = self.find(path)
if not entry:
raise ValueError("%s does not exists" % path)
if entry.type == 'root storage':
raise ValueError("can no remove root entry")
if entry.type == "storage" and not entry.child_id is None:
raise ValueError("storage contains children")
entry.pop()
# remove stream data
if entry.type == "stream":
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
self.free_dir_entry(entry)
def rmtree(self, path):
"""
Removes directory structure, similar to shutil.rmtree.
"""
for root, storage, streams in self.walk(path, topdown=False):
for item in streams:
self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
self.free_dir_entry(item)
for item in storage:
self.free_dir_entry(item)
root.child_id = None
# remove root item
self.remove(path)
def listdir(self, path = None):
"""
Return a list containing the ``DirEntry`` objects in the directory
given by path.
"""
result = self.listdir_dict(path)
return result.values()
def listdir_dict(self, path = None):
"""
Return a dict containing the ``DirEntry`` objects in the directory
given by path with name of the dir as key.
"""
if path is None:
path = self.root
root = self.find(path)
if root is None:
raise ValueError("unable to find dir: %s" % str(path))
if not root.isdir():
raise ValueError("can only list storage types")
children = self.children_cache.get(root.dir_id, None)
if children is not None:
return children
child = root.child()
result = {}
if not child:
self.children_cache[root.dir_id] = result
return result
dir_per_sector = self.sector_size // 128
max_dirs_entries = self.dir_sector_count * dir_per_sector
stack = deque([child])
count = 0
while stack:
current = stack.pop()
result[current.name] = current
count += 1
if count > max_dirs_entries:
raise CompoundFileBinaryError("corrupt folder structure")
left = current.left()
if left:
stack.append(left)
right = current.right()
if right:
stack.append(right)
self.children_cache[root.dir_id] = result
return result
def find(self, path):
"""
find a ``DirEntry`` located at *path*. Returns ``None`` if path
does not exist.
"""
if isinstance(path, DirEntry):
return path
if path == "/":
return self.root
split_path = path.lstrip('/').split("/")
i = 0
root = self.root
while True:
children = self.listdir_dict(root)
match = children.get(split_path[i], None)
if match:
if i == len(split_path) - 1:
return match
root = match
i += 1
else:
return None
def exists(self, path):
"""
Return ``True`` if path refers to a existing path.
"""
if self.find(path) is None:
return False
return True
def makedir(self, path, class_id=None):
"""
Create a storage DirEntry name path
"""
return self.create_dir_entry(path, dir_type='storage', class_id=class_id)
def makedirs(self, path):
"""
Recursive storage DirEntry creation function.
"""
root = ""
assert path.startswith('/')
p = path.strip('/')
for item in p.split('/'):
root += "/" + item
if not self.exists(root):
self.makedir(root)
return self.find(path)
def move(self, src, dst):
"""
Moves ``DirEntry`` from src to dst
"""
src_entry = self.find(src)
if src_entry is None:
raise ValueError("src path does not exist: %s" % src)
if dst.endswith('/'):
dst += src_entry.name
if self.exists(dst):
raise ValueError("dst path already exist: %s" % dst)
if dst == '/' or src == '/':
raise ValueError("cannot overwrite root dir")
split_path = dst.strip('/').split('/')
dst_basename = split_path[-1]
dst_dirname = '/' + '/'.join(split_path[:-1])
# print(dst)
# print(dst_basename, dst_dirname)
dst_entry = self.find(dst_dirname)
if dst_entry is None:
raise ValueError("src path does not exist: %s" % dst_dirname)
if not dst_entry.isdir():
raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
# src_entry.parent.remove_child(src_entry)
src_entry.pop()
src_entry.parent = None
src_entry.name = dst_basename
dst_entry.add_child(src_entry)
self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
return src_entry
def open(self, path, mode='r'):
"""Open stream, returning ``Stream`` object"""
entry = self.find(path)
if entry is None:
if mode == 'r':
raise ValueError("stream does not exists: %s" % path)
entry = self.create_dir_entry(path, 'stream', None)
else:
if not entry.isfile():
raise ValueError("can only open stream type DirEntry's")
if mode == 'w':
logging.debug("stream: %s exists, overwriting" % path)
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
entry.sector_id = None
entry.byte_size = 0
entry.class_id = None
elif mode == 'rw':
pass
s = Stream(self, entry, mode)
return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.makedir
|
python
|
def makedir(self, path, class_id=None):
return self.create_dir_entry(path, dir_type='storage', class_id=class_id)
|
Create a storage DirEntry name path
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1800-L1804
|
[
"def create_dir_entry(self, path, dir_type='storage', class_id=None):\n\n if self.exists(path):\n raise ValueError(\"%s already exists\" % path)\n\n dirname = os.path.dirname(path)\n basename = os.path.basename(path)\n\n root = self.find(dirname)\n\n if root is None:\n raise ValueError(\"parent dirname does not exist: %s\" % dirname)\n\n if not root.type in ('storage', 'root storage'):\n raise ValueError(\"can not add entry to non storage type\")\n\n dir_id = self.next_free_dir_id()\n logging.debug(\"next dir id %d\" % dir_id)\n\n entry = DirEntry(self, dir_id)\n entry.name = basename\n entry.type = dir_type\n entry.class_id = class_id\n\n # TODO: Implement a Red Black tree\n # all new DirEntries are black, so there is no tree balancing.\n # AAF Low-Level Container Specification says its alright to do this.\n\n entry.color = 'black'\n\n root.add_child(entry)\n self.dir_cache[dir_id] = entry\n\n return entry\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
if sector_size == 4096:
self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
elif sector_size == 512:
self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
else:
raise ValueError("sector size must be 4096 or 512")
self.major_version = 4
self.minor_version = 62
self.byte_order = "le"
self.sector_size = sector_size
self.mini_stream_sector_size = 64
self.dir_sector_count = 1
self.fat_sector_count = 1
self.dir_sector_start = 0
self.transaction_signature = 1
self.min_stream_max_size = 4096
self.minifat_sector_start = FREESECT
self.minifat_sector_count = 0
self.difat_sector_start = FREESECT
self.difat_sector_count = 0
self.difat = [[]]
for i in range(109):
self.difat[0].append(FREESECT)
self.difat[0][0] = 1
for i in range(self.sector_size // 4):
self.fat.append(FREESECT)
if i > 1:
self.fat_freelist.append(i)
self.fat[0] = ENDOFCHAIN # end of dir chain
self.fat[self.difat[0][0]] = FATSECT
self.root = DirEntry(self, 0)
self.root.name = 'Root Entry'
self.root.sector_id = None
self.root.type = 'root storage'
self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
self.dir_cache[0] = self.root
self.dir_fat_chain = [0]
# raise NotImplementedError("mode: %s supported not implemented" % self.f.mode)
def write_header(self):
logging.debug("writiing header")
f = self.f
f.seek(0)
f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
f.write(self.class_id.bytes_le)
write_u16le(f, self.minor_version)
write_u16le(f, self.major_version)
write_u16le(f, 0xFFFE) #byte order le
write_u16le(f, int(math.log(self.sector_size, 2)))
write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
f.write(b'\0' * 6) #skip reseverd
write_u32le(f, self.dir_sector_count)
write_u32le(f, self.fat_sector_count)
write_u32le(f, self.dir_sector_start)
write_u32le(f, self.transaction_signature)
write_u32le(f, self.min_stream_max_size)
write_u32le(f, self.minifat_sector_start)
write_u32le(f, self.minifat_sector_count)
write_u32le(f, self.difat_sector_start)
write_u32le(f, self.difat_sector_count)
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
def read_header(self):
f = self.f
f.seek(0)
magic = f.read(8)
# logging.debug("magic: %s" % magic.encode("hex"))
logging.debug("magic: %s" % str([magic]))
# clsid = f.read(16)
# logging.debug("clsid: %s" % clsid.encode("hex"))
self.class_id = auid.AUID(bytes_le=f.read(16))
logging.debug("clsid: %s" % str(self.class_id))
self.minor_version = read_u16le(f)
logging.debug("minor_version: %d" % self.minor_version)
self.major_version = read_u16le(f)
logging.debug("major_version: %d" % self.major_version)
byte_order = read_u16le(f)
if byte_order == 0xFFFE:
self.byte_order = 'le'
else:
raise NotImplementedError("endian format:0x%X not supported" % byte_order)
logging.debug("byte_order: %s" % self.byte_order)
size = read_u16le(f)
self.sector_size = pow(2, size)
logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
size = read_u16le(f)
self.mini_stream_sector_size = pow(2, size)
logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
if not self.sector_size in (4096, 512):
raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
if self.mini_stream_sector_size != 64:
raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
f.read(6) #skip reseverd
self.dir_sector_count = read_u32le(f)
logging.debug("dir_sector_count: %d" % self.dir_sector_count)
self.fat_sector_count = read_u32le(f)
logging.debug("fat_sector_count: %d" % self.fat_sector_count)
self.dir_sector_start = read_u32le(f)
logging.debug("dir_sector_start: %d" % self.dir_sector_start)
self.transaction_signature = read_u32le(f)
logging.debug("transaction_signature: %d" % self.transaction_signature)
self.min_stream_max_size = read_u32le(f)
logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
self.minifat_sector_start = read_u32le(f)
logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
self.minifat_sector_count = read_u32le(f)
logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
self.difat_sector_start = read_u32le(f)
logging.debug("difat_sector_start: %d" % self.difat_sector_start)
self.difat_sector_count = read_u32le(f)
logging.debug("difat_sector_count: %d" % self.difat_sector_count)
self.difat = [[]]
logging.debug("reading header difat at %d" % f.tell())
for i in range(109):
item = read_u32le(f)
# item = fat_sector_types.get(item, item)
self.difat[0].append(item)
sectors_left = self.difat_sector_count
sid = self.difat_sector_start
# reading difat sectors
while sectors_left:
logging.debug("reading difat sid: %d", sid)
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
break
self.difat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
difat = []
for i in range( (self.sector_size // 4)):
item = read_u32le(f)
difat.append(item)
self.difat.append(difat)
sid = difat[-1]
logging.debug("next difat: %d" % sid)
sectors_left -= 1
def iter_difat(self):
for i, sid in enumerate(self.difat[0]):
yield 0, i, sid
t = 1
for item in self.difat[1:]:
for i, sid in enumerate(item[:-1]):
yield t, i, sid
t+=1
def write_difat(self):
f = self.f
# write header entries
f.seek(76)
logging.debug("writing header difat")
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
if self.difat_sector_count == 0:
return
sid = self.difat_sector_start
assert len(self.difat[1:]) == self.difat_sector_count
for table in self.difat[1:]:
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
raise IOError("bad difat sector type")
pos = (sid + 1) * self.sector_size
logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
f.seek(pos)
for i in range(self.sector_size // 4):
write_u32le(f, table[i])
sid = table[-1]
def read_fat(self):
f = self.f
self.fat = array(str('I'))
sector_count = 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# len(fat_sectors),self.fat_sector_count
# assert len(fat_sectors) == self.fat_sector_count
if len(fat_sectors) != self.fat_sector_count:
logging.warn("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
self.fat_sector_count = len(fat_sectors)
for sid in fat_sectors:
pos = (sid + 1) * self.sector_size
f.seek(pos)
extend_sid_table(f, self.fat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.fat.byteswap()
for i,v in enumerate(self.fat):
if v == FREESECT:
self.fat_freelist.append(i)
logging.debug("read %d fat sectors ", sector_count)
if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
logging.warn("range lock sector has data")
# logging.debug("fat: %s" % str(pretty_sectors(self.fat)))
def write_fat(self):
logging.debug("writing fat")
f = self.f
sector_count = 0
assert len(self.fat)*4 % self.sector_size == 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# check that the difat has enough entries to hold the current fat
assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(fat_sectors):
# logging.debug("writing fat to sid: %d" % sid)
f.seek((sid + 1) * self.sector_size)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
f = self.f
sector_count = 0
self.minifat = array(str('I'))
for sid in self.get_fat_chain(self.minifat_sector_start):
self.minifat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
extend_sid_table(f, self.minifat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.minifat.byteswap()
# mini_stream_byte_size = 0
last_used_sector = 0
for i,v in enumerate(self.minifat):
if v == FREESECT:
self.minifat_freelist.append(i)
else:
last_used_sector = i
# mini_stream_byte_size += self.mini_stream_sector_size
mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
# for i, sect in enumerate(pretty_sectors(self.minifat)):
# print(i, sect)
logging.debug("read %d mini fat sectors", sector_count)
return mini_stream_byte_size
def write_minifat(self):
f = self.f
sector_count = 0
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
pos = (sid + 1) * self.sector_size
f.seek(pos)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.minifat[start:end]))
def write_modified_dir_entries(self):
f = self.f
for dir_id in sorted(self.modified):
entry = self.modified[dir_id]
stream_pos = entry.dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(entry.data)
# invalidate sector
if sid in self.sector_cache:
del self.sector_cache[sid]
self.modified = {}
def write_dir_entries(self):
self.write_modified_dir_entries()
# clear empty DirEntrys
empty_dir = bytearray(128)
f = self.f
self.dir_freelist.sort()
for dir_id in self.dir_freelist:
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(empty_dir)
def next_free_minifat_sect(self):
idx_per_sect = self.sector_size // self.mini_stream_sector_size
stream_sects = len(self.mini_stream_chain) * idx_per_sect
if self.minifat_freelist:
i = self.minifat_freelist.pop(0)
assert self.minifat[i] == FREESECT
if i+1 > stream_sects:
self.mini_stream_grow()
return i
# if we got here need to add aditional fat
sid = self.next_free_sect()
# logging.warn("growing minifat to sid %d" % sid)
idx_start = len(self.minifat)
idx_end = idx_start + self.sector_size // 4
self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
if self.minifat_sector_count == 0:
self.minifat_sector_count = 1
self.minifat_sector_start = sid
else:
self.minifat_sector_count += 1
self.fat[self.minifat_chain[-1]] = sid
self.minifat_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
return self.next_free_minifat_sect()
def next_free_sect(self):
if self.fat_freelist:
# print("using fat free list")
i = self.fat_freelist.pop(0)
assert self.fat[i] == FREESECT
# Handle Range Lock Sector
if i == RANGELOCKSECT and self.sector_size == 4096:
self.fat[i] = ENDOFCHAIN
logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
return self.next_free_sect()
return i
# if we got here need to add aditional fat
# logging.debug("fat full, growing")
difat_table = None
difat_index = None
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_difat_sect = None
if difat_index is None:
new_difat_sect = len(self.fat) + 1
logging.debug("adding new difat to sid: %d" % new_difat_sect)
if self.difat_sector_count == 0:
self.difat_sector_start = new_difat_sect
self.difat_sector_count = 1
else:
self.difat[-1][-1] = new_difat_sect
self.difat_sector_count += 1
# add difat table
difat = []
for i in range(self.sector_size // 4):
difat.append(FREESECT)
difat[-1] == ENDOFCHAIN
self.difat.append(difat)
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_fat_sect = len(self.fat)
# logging.debug("adding new fat to sid: %d" % new_fat_sect)
self.difat[difat_table][difat_index] = new_fat_sect
# grow fat entries
idx_start = len(self.fat)
idx_end = idx_start + (self.sector_size // 4)
self.fat.extend([FREESECT for i in range(self.sector_size // 4)])
non_free_sids = set([new_fat_sect, new_difat_sect])
# Handle Range Lock Sector
# The range lock sector is the sector
# that covers file offsets 0x7FFFFF00-0x7FFFFFFF in the file
if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
non_free_sids.add(RANGELOCKSECT)
logging.debug("adding range lock")
self.fat[RANGELOCKSECT] = ENDOFCHAIN
freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
self.fat_freelist.extend(freelist)
self.fat[new_fat_sect] = FATSECT
self.fat_sector_count += 1
if not new_difat_sect is None:
self.fat[new_difat_sect] = DIFSECT
return self.next_free_sect()
def read_sector_data(self, sid):
sector_data = self.sector_cache.get(sid, None)
if sector_data is not None:
return sector_data
else:
pos = (sid + 1) * self.sector_size
self.f.seek(pos)
sector_data = bytearray(self.sector_size)
#NOTE: if requested sector doesn't exsit or
# is truncated will padd with zeros, expected behavour
bytes_read = self.f.readinto(sector_data)
self.sector_cache[sid] = sector_data
return sector_data
def get_sid_offset(self, abs_pos):
sid, sid_offset = divmod(abs_pos, self.sector_size)
return sid-1, sid_offset
def dir_entry_sid_offset(self, dir_id):
stream_pos = dir_id * 128
chain_index, sid_offset = divmod(stream_pos, self.sector_size)
sid = self.dir_fat_chain[chain_index]
return sid, sid_offset
def dir_entry_pos(self, dir_id):
sid, sid_offset = self.dir_entry_sid_offset(dir_id)
pos = ((sid + 1) * self.sector_size) + sid_offset
return pos
def read_dir_entry(self, dir_id, parent = None):
if dir_id is None:
return None
entry = self.dir_cache.get(dir_id, None)
if entry is not None:
return entry
# assert not dir_id in self.dir_freelist
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
sector_data = self.read_sector_data(sid)
data= bytearray(sector_data[sid_offset:sid_offset+128])
entry = DirEntry(self, dir_id, data=data)
entry.parent = parent
self.dir_cache[dir_id] = entry
return entry
def clear_sector(self, sid):
sector_pos = (sid + 1) * self.sector_size
self.f.seek(sector_pos)
# for i in range(self.sector_size):
self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
# use free list first
if self.dir_freelist:
return self.dir_freelist.pop(0)
f = self.f
sect = self.fat_chain_append(self.dir_fat_chain[-1])
self.dir_fat_chain.append(sect)
self.dir_sector_count += 1
first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
last_dir_id = first_dir_id + (self.sector_size // 128)
self.dir_freelist.extend(range(first_dir_id, last_dir_id))
return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
fat = self.fat
fat_name = "FAT"
if minifat:
fat = self.minifat
fat_name = "MINIFAT"
# Floyd's Tortoise and Hare cycle-finding algorithm
a = start_sid
b = start_sid
sectors = []
if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
return []
while b != ENDOFCHAIN:
sectors.append(b)
b = fat[b]
if a != ENDOFCHAIN:
a = fat[a]
if a != ENDOFCHAIN:
a = fat[a]
if a == b:
raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
return sectors
def mini_stream_grow(self):
sid = self.next_free_sect()
# logging.debug("adding to mini stream fat sid: %d" % sid)
if not self.mini_stream_chain:
self.mini_stream_chain = [sid]
self.root.sector_id = sid
else:
self.fat[self.mini_stream_chain[-1]] = sid
self.mini_stream_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
if minifat:
sect = self.next_free_minifat_sect()
# logging.debug("creating new mini sector: %d" % sect)
fat = self.minifat
else:
sect = self.next_free_sect()
# logging.debug("creating new sector: %d" % sect)
fat = self.fat
if start_sid is None:
fat[sect] = ENDOFCHAIN
else:
fat_chain = self.get_fat_chain(start_sid, minifat)
assert fat_chain
fat[fat_chain[-1]] = sect
fat[sect] = ENDOFCHAIN
return sect
def free_fat_chain(self, start_sid, minifat=False):
fat =self.fat
if minifat:
fat = self.minifat
for sid in self.get_fat_chain(start_sid, minifat):
fat[sid] = FREESECT
if minifat:
self.minifat_freelist.insert(0, sid)
else:
self.fat_freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
if self.exists(path):
raise ValueError("%s already exists" % path)
dirname = os.path.dirname(path)
basename = os.path.basename(path)
root = self.find(dirname)
if root is None:
raise ValueError("parent dirname does not exist: %s" % dirname)
if not root.type in ('storage', 'root storage'):
raise ValueError("can not add entry to non storage type")
dir_id = self.next_free_dir_id()
logging.debug("next dir id %d" % dir_id)
entry = DirEntry(self, dir_id)
entry.name = basename
entry.type = dir_type
entry.class_id = class_id
# TODO: Implement a Red Black tree
# all new DirEntries are black, so there is no tree balancing.
# AAF Low-Level Container Specification says its alright to do this.
entry.color = 'black'
root.add_child(entry)
self.dir_cache[dir_id] = entry
return entry
def free_dir_entry(self, entry):
# add freelist
self.dir_freelist.append(entry.dir_id)
# remove from dir caches
if entry.dir_id in self.dir_cache:
del self.dir_cache[entry.dir_id]
if entry.dir_id in self.children_cache:
del self.children_cache[entry.dir_id]
if entry.dir_id in self.modified:
del self.modified[entry.dir_id]
entry.dir_id = None
def remove(self, path):
"""
Removes both streams and storage DirEntry types from file.
storage type entries need to be empty dirs.
"""
entry = self.find(path)
if not entry:
raise ValueError("%s does not exists" % path)
if entry.type == 'root storage':
raise ValueError("can no remove root entry")
if entry.type == "storage" and not entry.child_id is None:
raise ValueError("storage contains children")
entry.pop()
# remove stream data
if entry.type == "stream":
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
self.free_dir_entry(entry)
def rmtree(self, path):
"""
Removes directory structure, similar to shutil.rmtree.
"""
for root, storage, streams in self.walk(path, topdown=False):
for item in streams:
self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
self.free_dir_entry(item)
for item in storage:
self.free_dir_entry(item)
root.child_id = None
# remove root item
self.remove(path)
def listdir(self, path = None):
"""
Return a list containing the ``DirEntry`` objects in the directory
given by path.
"""
result = self.listdir_dict(path)
return result.values()
def listdir_dict(self, path = None):
"""
Return a dict containing the ``DirEntry`` objects in the directory
given by path with name of the dir as key.
"""
if path is None:
path = self.root
root = self.find(path)
if root is None:
raise ValueError("unable to find dir: %s" % str(path))
if not root.isdir():
raise ValueError("can only list storage types")
children = self.children_cache.get(root.dir_id, None)
if children is not None:
return children
child = root.child()
result = {}
if not child:
self.children_cache[root.dir_id] = result
return result
dir_per_sector = self.sector_size // 128
max_dirs_entries = self.dir_sector_count * dir_per_sector
stack = deque([child])
count = 0
while stack:
current = stack.pop()
result[current.name] = current
count += 1
if count > max_dirs_entries:
raise CompoundFileBinaryError("corrupt folder structure")
left = current.left()
if left:
stack.append(left)
right = current.right()
if right:
stack.append(right)
self.children_cache[root.dir_id] = result
return result
def find(self, path):
"""
find a ``DirEntry`` located at *path*. Returns ``None`` if path
does not exist.
"""
if isinstance(path, DirEntry):
return path
if path == "/":
return self.root
split_path = path.lstrip('/').split("/")
i = 0
root = self.root
while True:
children = self.listdir_dict(root)
match = children.get(split_path[i], None)
if match:
if i == len(split_path) - 1:
return match
root = match
i += 1
else:
return None
def walk(self, path = None, topdown=True):
"""
Similar to :func:`os.walk`, yeields a 3-tuple ``(root, storage_items, stream_items)``
"""
if path is None:
path = self.root
root = self.find(path)
if not root.isdir():
raise ValueError("can only walk storage types")
if not root.child_id:
return
if topdown:
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for item in storage_items:
for root, storage_items, stream_items in self.walk(item):
yield root, storage_items, stream_items
else:
def topdown_visit_node(root):
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
yield sub_root, sub_storage, sub_stream
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for root_item, storage, stream in topdown_visit_node(root):
yield root_item, storage, stream
def exists(self, path):
"""
Return ``True`` if path refers to a existing path.
"""
if self.find(path) is None:
return False
return True
def makedirs(self, path):
"""
Recursive storage DirEntry creation function.
"""
root = ""
assert path.startswith('/')
p = path.strip('/')
for item in p.split('/'):
root += "/" + item
if not self.exists(root):
self.makedir(root)
return self.find(path)
def move(self, src, dst):
"""
Moves ``DirEntry`` from src to dst
"""
src_entry = self.find(src)
if src_entry is None:
raise ValueError("src path does not exist: %s" % src)
if dst.endswith('/'):
dst += src_entry.name
if self.exists(dst):
raise ValueError("dst path already exist: %s" % dst)
if dst == '/' or src == '/':
raise ValueError("cannot overwrite root dir")
split_path = dst.strip('/').split('/')
dst_basename = split_path[-1]
dst_dirname = '/' + '/'.join(split_path[:-1])
# print(dst)
# print(dst_basename, dst_dirname)
dst_entry = self.find(dst_dirname)
if dst_entry is None:
raise ValueError("src path does not exist: %s" % dst_dirname)
if not dst_entry.isdir():
raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
# src_entry.parent.remove_child(src_entry)
src_entry.pop()
src_entry.parent = None
src_entry.name = dst_basename
dst_entry.add_child(src_entry)
self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
return src_entry
def open(self, path, mode='r'):
"""Open stream, returning ``Stream`` object"""
entry = self.find(path)
if entry is None:
if mode == 'r':
raise ValueError("stream does not exists: %s" % path)
entry = self.create_dir_entry(path, 'stream', None)
else:
if not entry.isfile():
raise ValueError("can only open stream type DirEntry's")
if mode == 'w':
logging.debug("stream: %s exists, overwriting" % path)
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
entry.sector_id = None
entry.byte_size = 0
entry.class_id = None
elif mode == 'rw':
pass
s = Stream(self, entry, mode)
return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.makedirs
|
python
|
def makedirs(self, path):
root = ""
assert path.startswith('/')
p = path.strip('/')
for item in p.split('/'):
root += "/" + item
if not self.exists(root):
self.makedir(root)
return self.find(path)
|
Recursive storage DirEntry creation function.
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1806-L1819
|
[
"def find(self, path):\n \"\"\"\n find a ``DirEntry`` located at *path*. Returns ``None`` if path\n does not exist.\n \"\"\"\n\n if isinstance(path, DirEntry):\n return path\n\n if path == \"/\":\n return self.root\n\n split_path = path.lstrip('/').split(\"/\")\n\n i = 0\n root = self.root\n\n while True:\n\n children = self.listdir_dict(root)\n match = children.get(split_path[i], None)\n\n if match:\n if i == len(split_path) - 1:\n return match\n root = match\n i += 1\n else:\n return None\n",
"def exists(self, path):\n \"\"\"\n Return ``True`` if path refers to a existing path.\n \"\"\"\n if self.find(path) is None:\n return False\n return True\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
if sector_size == 4096:
self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
elif sector_size == 512:
self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
else:
raise ValueError("sector size must be 4096 or 512")
self.major_version = 4
self.minor_version = 62
self.byte_order = "le"
self.sector_size = sector_size
self.mini_stream_sector_size = 64
self.dir_sector_count = 1
self.fat_sector_count = 1
self.dir_sector_start = 0
self.transaction_signature = 1
self.min_stream_max_size = 4096
self.minifat_sector_start = FREESECT
self.minifat_sector_count = 0
self.difat_sector_start = FREESECT
self.difat_sector_count = 0
self.difat = [[]]
for i in range(109):
self.difat[0].append(FREESECT)
self.difat[0][0] = 1
for i in range(self.sector_size // 4):
self.fat.append(FREESECT)
if i > 1:
self.fat_freelist.append(i)
self.fat[0] = ENDOFCHAIN # end of dir chain
self.fat[self.difat[0][0]] = FATSECT
self.root = DirEntry(self, 0)
self.root.name = 'Root Entry'
self.root.sector_id = None
self.root.type = 'root storage'
self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
self.dir_cache[0] = self.root
self.dir_fat_chain = [0]
# raise NotImplementedError("mode: %s supported not implemented" % self.f.mode)
def write_header(self):
logging.debug("writiing header")
f = self.f
f.seek(0)
f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
f.write(self.class_id.bytes_le)
write_u16le(f, self.minor_version)
write_u16le(f, self.major_version)
write_u16le(f, 0xFFFE) #byte order le
write_u16le(f, int(math.log(self.sector_size, 2)))
write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
f.write(b'\0' * 6) #skip reseverd
write_u32le(f, self.dir_sector_count)
write_u32le(f, self.fat_sector_count)
write_u32le(f, self.dir_sector_start)
write_u32le(f, self.transaction_signature)
write_u32le(f, self.min_stream_max_size)
write_u32le(f, self.minifat_sector_start)
write_u32le(f, self.minifat_sector_count)
write_u32le(f, self.difat_sector_start)
write_u32le(f, self.difat_sector_count)
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
def read_header(self):
f = self.f
f.seek(0)
magic = f.read(8)
# logging.debug("magic: %s" % magic.encode("hex"))
logging.debug("magic: %s" % str([magic]))
# clsid = f.read(16)
# logging.debug("clsid: %s" % clsid.encode("hex"))
self.class_id = auid.AUID(bytes_le=f.read(16))
logging.debug("clsid: %s" % str(self.class_id))
self.minor_version = read_u16le(f)
logging.debug("minor_version: %d" % self.minor_version)
self.major_version = read_u16le(f)
logging.debug("major_version: %d" % self.major_version)
byte_order = read_u16le(f)
if byte_order == 0xFFFE:
self.byte_order = 'le'
else:
raise NotImplementedError("endian format:0x%X not supported" % byte_order)
logging.debug("byte_order: %s" % self.byte_order)
size = read_u16le(f)
self.sector_size = pow(2, size)
logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
size = read_u16le(f)
self.mini_stream_sector_size = pow(2, size)
logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
if not self.sector_size in (4096, 512):
raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
if self.mini_stream_sector_size != 64:
raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
f.read(6) #skip reseverd
self.dir_sector_count = read_u32le(f)
logging.debug("dir_sector_count: %d" % self.dir_sector_count)
self.fat_sector_count = read_u32le(f)
logging.debug("fat_sector_count: %d" % self.fat_sector_count)
self.dir_sector_start = read_u32le(f)
logging.debug("dir_sector_start: %d" % self.dir_sector_start)
self.transaction_signature = read_u32le(f)
logging.debug("transaction_signature: %d" % self.transaction_signature)
self.min_stream_max_size = read_u32le(f)
logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
self.minifat_sector_start = read_u32le(f)
logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
self.minifat_sector_count = read_u32le(f)
logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
self.difat_sector_start = read_u32le(f)
logging.debug("difat_sector_start: %d" % self.difat_sector_start)
self.difat_sector_count = read_u32le(f)
logging.debug("difat_sector_count: %d" % self.difat_sector_count)
self.difat = [[]]
logging.debug("reading header difat at %d" % f.tell())
for i in range(109):
item = read_u32le(f)
# item = fat_sector_types.get(item, item)
self.difat[0].append(item)
sectors_left = self.difat_sector_count
sid = self.difat_sector_start
# reading difat sectors
while sectors_left:
logging.debug("reading difat sid: %d", sid)
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
break
self.difat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
difat = []
for i in range( (self.sector_size // 4)):
item = read_u32le(f)
difat.append(item)
self.difat.append(difat)
sid = difat[-1]
logging.debug("next difat: %d" % sid)
sectors_left -= 1
def iter_difat(self):
for i, sid in enumerate(self.difat[0]):
yield 0, i, sid
t = 1
for item in self.difat[1:]:
for i, sid in enumerate(item[:-1]):
yield t, i, sid
t+=1
def write_difat(self):
f = self.f
# write header entries
f.seek(76)
logging.debug("writing header difat")
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
if self.difat_sector_count == 0:
return
sid = self.difat_sector_start
assert len(self.difat[1:]) == self.difat_sector_count
for table in self.difat[1:]:
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
raise IOError("bad difat sector type")
pos = (sid + 1) * self.sector_size
logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
f.seek(pos)
for i in range(self.sector_size // 4):
write_u32le(f, table[i])
sid = table[-1]
def read_fat(self):
f = self.f
self.fat = array(str('I'))
sector_count = 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# len(fat_sectors),self.fat_sector_count
# assert len(fat_sectors) == self.fat_sector_count
if len(fat_sectors) != self.fat_sector_count:
logging.warn("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
self.fat_sector_count = len(fat_sectors)
for sid in fat_sectors:
pos = (sid + 1) * self.sector_size
f.seek(pos)
extend_sid_table(f, self.fat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.fat.byteswap()
for i,v in enumerate(self.fat):
if v == FREESECT:
self.fat_freelist.append(i)
logging.debug("read %d fat sectors ", sector_count)
if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
logging.warn("range lock sector has data")
# logging.debug("fat: %s" % str(pretty_sectors(self.fat)))
def write_fat(self):
logging.debug("writing fat")
f = self.f
sector_count = 0
assert len(self.fat)*4 % self.sector_size == 0
fat_sectors = []
for t, i, sid in self.iter_difat():
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
continue
fat_sectors.append(sid)
# check that the difat has enough entries to hold the current fat
assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(fat_sectors):
# logging.debug("writing fat to sid: %d" % sid)
f.seek((sid + 1) * self.sector_size)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
f = self.f
sector_count = 0
self.minifat = array(str('I'))
for sid in self.get_fat_chain(self.minifat_sector_start):
self.minifat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
extend_sid_table(f, self.minifat, self.sector_size)
sector_count += 1
if sys.byteorder == 'big':
self.minifat.byteswap()
# mini_stream_byte_size = 0
last_used_sector = 0
for i,v in enumerate(self.minifat):
if v == FREESECT:
self.minifat_freelist.append(i)
else:
last_used_sector = i
# mini_stream_byte_size += self.mini_stream_sector_size
mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
# for i, sect in enumerate(pretty_sectors(self.minifat)):
# print(i, sect)
logging.debug("read %d mini fat sectors", sector_count)
return mini_stream_byte_size
def write_minifat(self):
f = self.f
sector_count = 0
element_count = self.sector_size // 4
fat_table_struct = Struct(str('<%dI' % element_count))
for i, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
pos = (sid + 1) * self.sector_size
f.seek(pos)
start = i * element_count
end = start + element_count
f.write(fat_table_struct.pack(*self.minifat[start:end]))
def write_modified_dir_entries(self):
f = self.f
for dir_id in sorted(self.modified):
entry = self.modified[dir_id]
stream_pos = entry.dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(entry.data)
# invalidate sector
if sid in self.sector_cache:
del self.sector_cache[sid]
self.modified = {}
def write_dir_entries(self):
self.write_modified_dir_entries()
# clear empty DirEntrys
empty_dir = bytearray(128)
f = self.f
self.dir_freelist.sort()
for dir_id in self.dir_freelist:
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
pos = ((sid + 1) * self.sector_size) + sid_offset
f.seek(pos)
f.write(empty_dir)
def next_free_minifat_sect(self):
idx_per_sect = self.sector_size // self.mini_stream_sector_size
stream_sects = len(self.mini_stream_chain) * idx_per_sect
if self.minifat_freelist:
i = self.minifat_freelist.pop(0)
assert self.minifat[i] == FREESECT
if i+1 > stream_sects:
self.mini_stream_grow()
return i
# if we got here need to add aditional fat
sid = self.next_free_sect()
# logging.warn("growing minifat to sid %d" % sid)
idx_start = len(self.minifat)
idx_end = idx_start + self.sector_size // 4
self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
if self.minifat_sector_count == 0:
self.minifat_sector_count = 1
self.minifat_sector_start = sid
else:
self.minifat_sector_count += 1
self.fat[self.minifat_chain[-1]] = sid
self.minifat_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
return self.next_free_minifat_sect()
def next_free_sect(self):
if self.fat_freelist:
# print("using fat free list")
i = self.fat_freelist.pop(0)
assert self.fat[i] == FREESECT
# Handle Range Lock Sector
if i == RANGELOCKSECT and self.sector_size == 4096:
self.fat[i] = ENDOFCHAIN
logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
return self.next_free_sect()
return i
# if we got here need to add aditional fat
# logging.debug("fat full, growing")
difat_table = None
difat_index = None
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_difat_sect = None
if difat_index is None:
new_difat_sect = len(self.fat) + 1
logging.debug("adding new difat to sid: %d" % new_difat_sect)
if self.difat_sector_count == 0:
self.difat_sector_start = new_difat_sect
self.difat_sector_count = 1
else:
self.difat[-1][-1] = new_difat_sect
self.difat_sector_count += 1
# add difat table
difat = []
for i in range(self.sector_size // 4):
difat.append(FREESECT)
difat[-1] == ENDOFCHAIN
self.difat.append(difat)
for t, i, v in self.iter_difat():
if v == FREESECT:
difat_table = t
difat_index = i
break
new_fat_sect = len(self.fat)
# logging.debug("adding new fat to sid: %d" % new_fat_sect)
self.difat[difat_table][difat_index] = new_fat_sect
# grow fat entries
idx_start = len(self.fat)
idx_end = idx_start + (self.sector_size // 4)
self.fat.extend([FREESECT for i in range(self.sector_size // 4)])
non_free_sids = set([new_fat_sect, new_difat_sect])
# Handle Range Lock Sector
# The range lock sector is the sector
# that covers file offsets 0x7FFFFF00-0x7FFFFFFF in the file
if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
non_free_sids.add(RANGELOCKSECT)
logging.debug("adding range lock")
self.fat[RANGELOCKSECT] = ENDOFCHAIN
freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
self.fat_freelist.extend(freelist)
self.fat[new_fat_sect] = FATSECT
self.fat_sector_count += 1
if not new_difat_sect is None:
self.fat[new_difat_sect] = DIFSECT
return self.next_free_sect()
def read_sector_data(self, sid):
sector_data = self.sector_cache.get(sid, None)
if sector_data is not None:
return sector_data
else:
pos = (sid + 1) * self.sector_size
self.f.seek(pos)
sector_data = bytearray(self.sector_size)
#NOTE: if requested sector doesn't exsit or
# is truncated will padd with zeros, expected behavour
bytes_read = self.f.readinto(sector_data)
self.sector_cache[sid] = sector_data
return sector_data
def get_sid_offset(self, abs_pos):
sid, sid_offset = divmod(abs_pos, self.sector_size)
return sid-1, sid_offset
def dir_entry_sid_offset(self, dir_id):
stream_pos = dir_id * 128
chain_index, sid_offset = divmod(stream_pos, self.sector_size)
sid = self.dir_fat_chain[chain_index]
return sid, sid_offset
def dir_entry_pos(self, dir_id):
sid, sid_offset = self.dir_entry_sid_offset(dir_id)
pos = ((sid + 1) * self.sector_size) + sid_offset
return pos
def read_dir_entry(self, dir_id, parent = None):
if dir_id is None:
return None
entry = self.dir_cache.get(dir_id, None)
if entry is not None:
return entry
# assert not dir_id in self.dir_freelist
stream_pos = dir_id * 128
chain_index = stream_pos // self.sector_size
sid_offset = stream_pos % self.sector_size
sid = self.dir_fat_chain[chain_index]
sector_data = self.read_sector_data(sid)
data= bytearray(sector_data[sid_offset:sid_offset+128])
entry = DirEntry(self, dir_id, data=data)
entry.parent = parent
self.dir_cache[dir_id] = entry
return entry
def clear_sector(self, sid):
sector_pos = (sid + 1) * self.sector_size
self.f.seek(sector_pos)
# for i in range(self.sector_size):
self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
# use free list first
if self.dir_freelist:
return self.dir_freelist.pop(0)
f = self.f
sect = self.fat_chain_append(self.dir_fat_chain[-1])
self.dir_fat_chain.append(sect)
self.dir_sector_count += 1
first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
last_dir_id = first_dir_id + (self.sector_size // 128)
self.dir_freelist.extend(range(first_dir_id, last_dir_id))
return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
fat = self.fat
fat_name = "FAT"
if minifat:
fat = self.minifat
fat_name = "MINIFAT"
# Floyd's Tortoise and Hare cycle-finding algorithm
a = start_sid
b = start_sid
sectors = []
if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
return []
while b != ENDOFCHAIN:
sectors.append(b)
b = fat[b]
if a != ENDOFCHAIN:
a = fat[a]
if a != ENDOFCHAIN:
a = fat[a]
if a == b:
raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
return sectors
def mini_stream_grow(self):
sid = self.next_free_sect()
# logging.debug("adding to mini stream fat sid: %d" % sid)
if not self.mini_stream_chain:
self.mini_stream_chain = [sid]
self.root.sector_id = sid
else:
self.fat[self.mini_stream_chain[-1]] = sid
self.mini_stream_chain.append(sid)
self.fat[sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
if minifat:
sect = self.next_free_minifat_sect()
# logging.debug("creating new mini sector: %d" % sect)
fat = self.minifat
else:
sect = self.next_free_sect()
# logging.debug("creating new sector: %d" % sect)
fat = self.fat
if start_sid is None:
fat[sect] = ENDOFCHAIN
else:
fat_chain = self.get_fat_chain(start_sid, minifat)
assert fat_chain
fat[fat_chain[-1]] = sect
fat[sect] = ENDOFCHAIN
return sect
def free_fat_chain(self, start_sid, minifat=False):
fat =self.fat
if minifat:
fat = self.minifat
for sid in self.get_fat_chain(start_sid, minifat):
fat[sid] = FREESECT
if minifat:
self.minifat_freelist.insert(0, sid)
else:
self.fat_freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
if self.exists(path):
raise ValueError("%s already exists" % path)
dirname = os.path.dirname(path)
basename = os.path.basename(path)
root = self.find(dirname)
if root is None:
raise ValueError("parent dirname does not exist: %s" % dirname)
if not root.type in ('storage', 'root storage'):
raise ValueError("can not add entry to non storage type")
dir_id = self.next_free_dir_id()
logging.debug("next dir id %d" % dir_id)
entry = DirEntry(self, dir_id)
entry.name = basename
entry.type = dir_type
entry.class_id = class_id
# TODO: Implement a Red Black tree
# all new DirEntries are black, so there is no tree balancing.
# AAF Low-Level Container Specification says its alright to do this.
entry.color = 'black'
root.add_child(entry)
self.dir_cache[dir_id] = entry
return entry
def free_dir_entry(self, entry):
# add freelist
self.dir_freelist.append(entry.dir_id)
# remove from dir caches
if entry.dir_id in self.dir_cache:
del self.dir_cache[entry.dir_id]
if entry.dir_id in self.children_cache:
del self.children_cache[entry.dir_id]
if entry.dir_id in self.modified:
del self.modified[entry.dir_id]
entry.dir_id = None
def remove(self, path):
"""
Removes both streams and storage DirEntry types from file.
storage type entries need to be empty dirs.
"""
entry = self.find(path)
if not entry:
raise ValueError("%s does not exists" % path)
if entry.type == 'root storage':
raise ValueError("can no remove root entry")
if entry.type == "storage" and not entry.child_id is None:
raise ValueError("storage contains children")
entry.pop()
# remove stream data
if entry.type == "stream":
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
self.free_dir_entry(entry)
def rmtree(self, path):
"""
Removes directory structure, similar to shutil.rmtree.
"""
for root, storage, streams in self.walk(path, topdown=False):
for item in streams:
self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
self.free_dir_entry(item)
for item in storage:
self.free_dir_entry(item)
root.child_id = None
# remove root item
self.remove(path)
def listdir(self, path = None):
"""
Return a list containing the ``DirEntry`` objects in the directory
given by path.
"""
result = self.listdir_dict(path)
return result.values()
def listdir_dict(self, path = None):
"""
Return a dict containing the ``DirEntry`` objects in the directory
given by path with name of the dir as key.
"""
if path is None:
path = self.root
root = self.find(path)
if root is None:
raise ValueError("unable to find dir: %s" % str(path))
if not root.isdir():
raise ValueError("can only list storage types")
children = self.children_cache.get(root.dir_id, None)
if children is not None:
return children
child = root.child()
result = {}
if not child:
self.children_cache[root.dir_id] = result
return result
dir_per_sector = self.sector_size // 128
max_dirs_entries = self.dir_sector_count * dir_per_sector
stack = deque([child])
count = 0
while stack:
current = stack.pop()
result[current.name] = current
count += 1
if count > max_dirs_entries:
raise CompoundFileBinaryError("corrupt folder structure")
left = current.left()
if left:
stack.append(left)
right = current.right()
if right:
stack.append(right)
self.children_cache[root.dir_id] = result
return result
def find(self, path):
"""
find a ``DirEntry`` located at *path*. Returns ``None`` if path
does not exist.
"""
if isinstance(path, DirEntry):
return path
if path == "/":
return self.root
split_path = path.lstrip('/').split("/")
i = 0
root = self.root
while True:
children = self.listdir_dict(root)
match = children.get(split_path[i], None)
if match:
if i == len(split_path) - 1:
return match
root = match
i += 1
else:
return None
def walk(self, path = None, topdown=True):
"""
Similar to :func:`os.walk`, yeields a 3-tuple ``(root, storage_items, stream_items)``
"""
if path is None:
path = self.root
root = self.find(path)
if not root.isdir():
raise ValueError("can only walk storage types")
if not root.child_id:
return
if topdown:
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for item in storage_items:
for root, storage_items, stream_items in self.walk(item):
yield root, storage_items, stream_items
else:
def topdown_visit_node(root):
storage_items = []
stream_items = []
for item in self.listdir(root):
if item.isdir():
for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
yield sub_root, sub_storage, sub_stream
storage_items.append(item)
else:
stream_items.append(item)
yield root, storage_items, stream_items
for root_item, storage, stream in topdown_visit_node(root):
yield root_item, storage, stream
def exists(self, path):
"""
Return ``True`` if path refers to a existing path.
"""
if self.find(path) is None:
return False
return True
def makedir(self, path, class_id=None):
"""
Create a storage DirEntry name path
"""
return self.create_dir_entry(path, dir_type='storage', class_id=class_id)
def move(self, src, dst):
"""
Moves ``DirEntry`` from src to dst
"""
src_entry = self.find(src)
if src_entry is None:
raise ValueError("src path does not exist: %s" % src)
if dst.endswith('/'):
dst += src_entry.name
if self.exists(dst):
raise ValueError("dst path already exist: %s" % dst)
if dst == '/' or src == '/':
raise ValueError("cannot overwrite root dir")
split_path = dst.strip('/').split('/')
dst_basename = split_path[-1]
dst_dirname = '/' + '/'.join(split_path[:-1])
# print(dst)
# print(dst_basename, dst_dirname)
dst_entry = self.find(dst_dirname)
if dst_entry is None:
raise ValueError("src path does not exist: %s" % dst_dirname)
if not dst_entry.isdir():
raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
# src_entry.parent.remove_child(src_entry)
src_entry.pop()
src_entry.parent = None
src_entry.name = dst_basename
dst_entry.add_child(src_entry)
self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
return src_entry
def open(self, path, mode='r'):
"""Open stream, returning ``Stream`` object"""
entry = self.find(path)
if entry is None:
if mode == 'r':
raise ValueError("stream does not exists: %s" % path)
entry = self.create_dir_entry(path, 'stream', None)
else:
if not entry.isfile():
raise ValueError("can only open stream type DirEntry's")
if mode == 'w':
logging.debug("stream: %s exists, overwriting" % path)
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
entry.sector_id = None
entry.byte_size = 0
entry.class_id = None
elif mode == 'rw':
pass
s = Stream(self, entry, mode)
return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.move
|
python
|
def move(self, src, dst):
src_entry = self.find(src)
if src_entry is None:
raise ValueError("src path does not exist: %s" % src)
if dst.endswith('/'):
dst += src_entry.name
if self.exists(dst):
raise ValueError("dst path already exist: %s" % dst)
if dst == '/' or src == '/':
raise ValueError("cannot overwrite root dir")
split_path = dst.strip('/').split('/')
dst_basename = split_path[-1]
dst_dirname = '/' + '/'.join(split_path[:-1])
# print(dst)
# print(dst_basename, dst_dirname)
dst_entry = self.find(dst_dirname)
if dst_entry is None:
raise ValueError("src path does not exist: %s" % dst_dirname)
if not dst_entry.isdir():
raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
# src_entry.parent.remove_child(src_entry)
src_entry.pop()
src_entry.parent = None
src_entry.name = dst_basename
dst_entry.add_child(src_entry)
self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
return src_entry
|
Moves ``DirEntry`` from src to dst
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1821-L1862
|
[
"def find(self, path):\n \"\"\"\n find a ``DirEntry`` located at *path*. Returns ``None`` if path\n does not exist.\n \"\"\"\n\n if isinstance(path, DirEntry):\n return path\n\n if path == \"/\":\n return self.root\n\n split_path = path.lstrip('/').split(\"/\")\n\n i = 0\n root = self.root\n\n while True:\n\n children = self.listdir_dict(root)\n match = children.get(split_path[i], None)\n\n if match:\n if i == len(split_path) - 1:\n return match\n root = match\n i += 1\n else:\n return None\n",
"def exists(self, path):\n \"\"\"\n Return ``True`` if path refers to a existing path.\n \"\"\"\n if self.find(path) is None:\n return False\n return True\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
    """Initialize in-memory state for a brand-new, empty compound file.

    Only 512- and 4096-byte sectors are supported; the header class id
    differs between the two layouts.  Sets up the header DIFAT, a single
    FAT sector, and the 'Root Entry' directory entry.

    Raises:
        ValueError: if sector_size is not 4096 or 512.
    """
    if sector_size == 4096:
        self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
    elif sector_size == 512:
        self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
    else:
        raise ValueError("sector size must be 4096 or 512")
    self.major_version = 4
    self.minor_version = 62
    self.byte_order = "le"
    self.sector_size = sector_size
    self.mini_stream_sector_size = 64
    self.dir_sector_count = 1
    self.fat_sector_count = 1
    self.dir_sector_start = 0
    self.transaction_signature = 1
    self.min_stream_max_size = 4096
    self.minifat_sector_start = FREESECT
    self.minifat_sector_count = 0
    self.difat_sector_start = FREESECT
    self.difat_sector_count = 0
    # header DIFAT: 109 entries, first one points at the single FAT sector (sid 1)
    self.difat = [[]]
    for i in range(109):
        self.difat[0].append(FREESECT)
    self.difat[0][0] = 1
    # one FAT sector's worth of entries; sids 0 and 1 are taken (dir + FAT)
    for i in range(self.sector_size // 4):
        self.fat.append(FREESECT)
        if i > 1:
            self.fat_freelist.append(i)
    self.fat[0] = ENDOFCHAIN # end of dir chain
    self.fat[self.difat[0][0]] = FATSECT
    self.root = DirEntry(self, 0)
    self.root.name = 'Root Entry'
    self.root.sector_id = None
    self.root.type = 'root storage'
    self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
    self.dir_cache[0] = self.root
    self.dir_fat_chain = [0]
def write_header(self):
    """Write the CFB header (magic, class id, versions, sector geometry,
    sector counts/starts) plus the 109-entry header DIFAT, then zero-pad
    the remainder of the first sector.
    """
    logging.debug("writing header")  # fixed log typo "writiing"
    f = self.f
    f.seek(0)
    f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
    f.write(self.class_id.bytes_le)
    write_u16le(f, self.minor_version)
    write_u16le(f, self.major_version)
    write_u16le(f, 0xFFFE) # byte order le
    # sector sizes are stored as powers of two
    write_u16le(f, int(math.log(self.sector_size, 2)))
    write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
    f.write(b'\0' * 6) # skip reserved
    write_u32le(f, self.dir_sector_count)
    write_u32le(f, self.fat_sector_count)
    write_u32le(f, self.dir_sector_start)
    write_u32le(f, self.transaction_signature)
    write_u32le(f, self.min_stream_max_size)
    write_u32le(f, self.minifat_sector_start)
    write_u32le(f, self.minifat_sector_count)
    write_u32le(f, self.difat_sector_start)
    write_u32le(f, self.difat_sector_count)
    for i in range(109):
        write_u32le(f, self.difat[0][i])
    # zero-pad the rest of the first sector in one write instead of per byte
    f.write(b'\0' * (self.sector_size - f.tell()))
def read_header(self):
    """Parse the CFB header: magic, class id, versions, sector geometry,
    sector counts/starts, the 109-entry header DIFAT, and any chained
    DIFAT sectors.

    Raises:
        NotImplementedError: for big-endian files or unsupported sector sizes.
    """
    f = self.f
    f.seek(0)
    magic = f.read(8)
    logging.debug("magic: %s" % str([magic]))
    self.class_id = auid.AUID(bytes_le=f.read(16))
    logging.debug("clsid: %s" % str(self.class_id))
    self.minor_version = read_u16le(f)
    logging.debug("minor_version: %d" % self.minor_version)
    self.major_version = read_u16le(f)
    logging.debug("major_version: %d" % self.major_version)
    byte_order = read_u16le(f)
    if byte_order == 0xFFFE:
        self.byte_order = 'le'
    else:
        raise NotImplementedError("endian format:0x%X not supported" % byte_order)
    logging.debug("byte_order: %s" % self.byte_order)
    # sector sizes are stored as powers of two
    size = read_u16le(f)
    self.sector_size = pow(2, size)
    logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
    size = read_u16le(f)
    self.mini_stream_sector_size = pow(2, size)
    logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
    if not self.sector_size in (4096, 512):
        raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
    if self.mini_stream_sector_size != 64:
        raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
    f.read(6) # skip reserved
    self.dir_sector_count = read_u32le(f)
    logging.debug("dir_sector_count: %d" % self.dir_sector_count)
    self.fat_sector_count = read_u32le(f)
    logging.debug("fat_sector_count: %d" % self.fat_sector_count)
    self.dir_sector_start = read_u32le(f)
    logging.debug("dir_sector_start: %d" % self.dir_sector_start)
    self.transaction_signature = read_u32le(f)
    logging.debug("transaction_signature: %d" % self.transaction_signature)
    self.min_stream_max_size = read_u32le(f)
    logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
    self.minifat_sector_start = read_u32le(f)
    logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
    self.minifat_sector_count = read_u32le(f)
    logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
    self.difat_sector_start = read_u32le(f)
    logging.debug("difat_sector_start: %d" % self.difat_sector_start)
    self.difat_sector_count = read_u32le(f)
    logging.debug("difat_sector_count: %d" % self.difat_sector_count)
    self.difat = [[]]
    logging.debug("reading header difat at %d" % f.tell())
    for i in range(109):
        item = read_u32le(f)
        self.difat[0].append(item)
    sectors_left = self.difat_sector_count
    sid = self.difat_sector_start
    # reading difat sectors; each sector's final entry is the next sector's sid
    while sectors_left:
        logging.debug("reading difat sid: %d", sid)
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            # hit a special marker (ENDOFCHAIN/FREESECT/...) - stop
            break
        self.difat_chain.append(sid)
        f.seek((sid + 1) * self.sector_size)
        difat = []
        for i in range( (self.sector_size // 4)):
            item = read_u32le(f)
            difat.append(item)
        self.difat.append(difat)
        sid = difat[-1]
        logging.debug("next difat: %d" % sid)
        sectors_left -= 1
def iter_difat(self):
    """Yield ``(table_index, entry_index, sid)`` for every usable DIFAT entry.

    Table 0 is the 109-entry header array (yielded in full).  Each later
    table is a DIFAT sector whose final entry is the chain pointer to the
    next DIFAT sector, so that last entry is skipped.
    """
    for entry_index, sid in enumerate(self.difat[0]):
        yield 0, entry_index, sid
    for table_index, table in enumerate(self.difat[1:], start=1):
        for entry_index, sid in enumerate(table[:-1]):
            yield table_index, entry_index, sid
def write_difat(self):
    """Write the DIFAT: the 109 header entries (padding the rest of the
    header sector with zeros), followed by any chained DIFAT sectors.

    Raises:
        IOError: if a sid in the DIFAT sector chain is a special marker.
    """
    f = self.f
    # write header entries (DIFAT starts at header offset 76)
    f.seek(76)
    logging.debug("writing header difat")
    for i in range(109):
        write_u32le(f, self.difat[0][i])
    # zero-pad the remainder of the header sector in one write, not per byte
    f.write(b'\0' * (self.sector_size - f.tell()))
    if self.difat_sector_count == 0:
        return
    sid = self.difat_sector_start
    assert len(self.difat[1:]) == self.difat_sector_count
    for table in self.difat[1:]:
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            raise IOError("bad difat sector type")
        pos = (sid + 1) * self.sector_size
        logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
        f.seek(pos)
        for i in range(self.sector_size // 4):
            write_u32le(f, table[i])
        # last entry of each DIFAT sector is the sid of the next one
        sid = table[-1]
def read_fat(self):
    """Load the FAT from the sectors listed in the DIFAT and build the
    free-sector list.

    Trusts the DIFAT over the header's fat_sector_count when they
    disagree.  On 4096-byte-sector files, warns if the range-lock sector
    holds chain data.
    """
    f = self.f
    self.fat = array(str('I'))
    sector_count = 0
    fat_sectors = []
    for t, i, sid in self.iter_difat():
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            # FREESECT/ENDOFCHAIN/... markers are not FAT sectors
            continue
        fat_sectors.append(sid)
    if len(fat_sectors) != self.fat_sector_count:
        # logging.warn is a deprecated alias; use logging.warning
        logging.warning("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
        self.fat_sector_count = len(fat_sectors)
    for sid in fat_sectors:
        pos = (sid + 1) * self.sector_size
        f.seek(pos)
        extend_sid_table(f, self.fat, self.sector_size)
        sector_count += 1
    if sys.byteorder == 'big':
        # table is stored little endian on disk
        self.fat.byteswap()
    for i,v in enumerate(self.fat):
        if v == FREESECT:
            self.fat_freelist.append(i)
    logging.debug("read %d fat sectors ", sector_count)
    if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
        if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
            logging.warning("range lock sector has data")
def write_fat(self):
    """Write the in-memory FAT back to the sectors recorded in the DIFAT,
    one packed little-endian u32 table per sector."""
    logging.debug("writing fat")
    f = self.f
    sector_count = 0
    # the FAT must fill its sectors exactly
    assert len(self.fat)*4 % self.sector_size == 0
    fat_sectors = []
    for t, i, sid in self.iter_difat():
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            continue
        fat_sectors.append(sid)
    # check that the difat has enough entries to hold the current fat
    assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
    element_count = self.sector_size // 4
    fat_table_struct = Struct(str('<%dI' % element_count))
    for i, sid in enumerate(fat_sectors):
        f.seek((sid + 1) * self.sector_size)
        start = i * element_count
        end = start + element_count
        f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
    """Load the miniFAT by following its FAT chain.

    Returns:
        int: size in bytes of the mini stream, derived from the last
        non-free miniFAT entry (the caller cross-checks this against
        ``root.byte_size``).
    """
    f = self.f
    sector_count = 0
    self.minifat = array(str('I'))
    for sid in self.get_fat_chain(self.minifat_sector_start):
        self.minifat_chain.append(sid)
        f.seek((sid + 1) * self.sector_size)
        extend_sid_table(f, self.minifat, self.sector_size)
        sector_count += 1
    if sys.byteorder == 'big':
        # table is stored little endian on disk
        self.minifat.byteswap()
    last_used_sector = 0
    for i,v in enumerate(self.minifat):
        if v == FREESECT:
            self.minifat_freelist.append(i)
        else:
            last_used_sector = i
    # mini stream size extends to the end of the last used mini sector
    mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
    logging.debug("read %d mini fat sectors", sector_count)
    return mini_stream_byte_size
def write_minifat(self):
    """Write the miniFAT back to its sectors (chain located via the FAT),
    one packed little-endian u32 table per sector."""
    entries_per_sector = self.sector_size // 4
    packer = Struct(str('<%dI' % entries_per_sector))
    out = self.f
    for table_index, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
        out.seek((sid + 1) * self.sector_size)
        first = table_index * entries_per_sector
        out.write(packer.pack(*self.minifat[first:first + entries_per_sector]))
def write_modified_dir_entries(self):
    """Flush every dirty DirEntry in ``self.modified`` to disk, invalidate
    the affected sector-cache entries, and clear the modified set."""
    out = self.f
    for dir_id in sorted(self.modified):
        entry = self.modified[dir_id]
        # each directory entry is 128 bytes inside the directory stream
        offset_in_stream = entry.dir_id * 128
        sid = self.dir_fat_chain[offset_in_stream // self.sector_size]
        in_sector = offset_in_stream % self.sector_size
        out.seek((sid + 1) * self.sector_size + in_sector)
        out.write(entry.data)
        # drop any cached copy of the sector we just rewrote
        if sid in self.sector_cache:
            del self.sector_cache[sid]
    self.modified = {}
def write_dir_entries(self):
    """Flush dirty directory entries, then blank out every freed slot in
    the directory stream with 128 zero bytes."""
    self.write_modified_dir_entries()
    blank = bytearray(128)
    out = self.f
    self.dir_freelist.sort()
    for dir_id in self.dir_freelist:
        offset_in_stream = dir_id * 128
        sid = self.dir_fat_chain[offset_in_stream // self.sector_size]
        in_sector = offset_in_stream % self.sector_size
        out.seek((sid + 1) * self.sector_size + in_sector)
        out.write(blank)
def next_free_minifat_sect(self):
    """Return the index of a free miniFAT sector, growing the miniFAT
    (and, when needed, the backing mini stream) first.

    Returns:
        int: a miniFAT sector index whose entry is currently FREESECT.
    """
    idx_per_sect = self.sector_size // self.mini_stream_sector_size
    stream_sects = len(self.mini_stream_chain) * idx_per_sect
    if self.minifat_freelist:
        i = self.minifat_freelist.pop(0)
        assert self.minifat[i] == FREESECT
        # make sure the mini stream has room to back this mini sector
        if i+1 > stream_sects:
            self.mini_stream_grow()
        return i
    # freelist empty: allocate a regular sector to hold more miniFAT entries
    sid = self.next_free_sect()
    idx_start = len(self.minifat)
    idx_end = idx_start + self.sector_size // 4
    self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
    self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
    if self.minifat_sector_count == 0:
        # first miniFAT sector: record the chain head in the header fields
        self.minifat_sector_count = 1
        self.minifat_sector_start = sid
    else:
        self.minifat_sector_count += 1
        self.fat[self.minifat_chain[-1]] = sid
    self.minifat_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
    # retry now that the freelist is non-empty
    return self.next_free_minifat_sect()
def next_free_sect(self):
    """Return the sid of a free sector, growing the FAT (and, when the
    DIFAT is full, the DIFAT itself) as required.

    Never hands out the range-lock sector (the sector covering file bytes
    0x7FFFFF00-0x7FFFFFFF) on 4096-byte-sector files.

    Returns:
        int: sid whose FAT entry is currently FREESECT.
    """
    if self.fat_freelist:
        i = self.fat_freelist.pop(0)
        assert self.fat[i] == FREESECT
        # Handle Range Lock Sector
        if i == RANGELOCKSECT and self.sector_size == 4096:
            self.fat[i] = ENDOFCHAIN
            logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
            return self.next_free_sect()
        return i
    # freelist empty: grow the FAT by one sector, first finding (or adding)
    # a free DIFAT slot to record the new FAT sector in
    difat_table = None
    difat_index = None
    for t, i, v in self.iter_difat():
        if v == FREESECT:
            difat_table = t
            difat_index = i
            break
    new_difat_sect = None
    if difat_index is None:
        new_difat_sect = len(self.fat) + 1
        logging.debug("adding new difat to sid: %d" % new_difat_sect)
        if self.difat_sector_count == 0:
            self.difat_sector_start = new_difat_sect
            self.difat_sector_count = 1
        else:
            # chain from the previous DIFAT sector's next-pointer
            self.difat[-1][-1] = new_difat_sect
            self.difat_sector_count += 1
        # add difat table
        difat = []
        for i in range(self.sector_size // 4):
            difat.append(FREESECT)
        # BUG FIX: this was `difat[-1] == ENDOFCHAIN` (a no-op comparison).
        # The final entry of a DIFAT sector is its next-sector field and must
        # be terminated with ENDOFCHAIN until another DIFAT sector is chained.
        difat[-1] = ENDOFCHAIN
        self.difat.append(difat)
        for t, i, v in self.iter_difat():
            if v == FREESECT:
                difat_table = t
                difat_index = i
                break
    new_fat_sect = len(self.fat)
    self.difat[difat_table][difat_index] = new_fat_sect
    # grow fat entries by one sector's worth
    idx_start = len(self.fat)
    idx_end = idx_start + (self.sector_size // 4)
    self.fat.extend([FREESECT for i in range(self.sector_size // 4)])
    non_free_sids = set([new_fat_sect, new_difat_sect])
    # Handle Range Lock Sector: the sector that covers file offsets
    # 0x7FFFFF00-0x7FFFFFFF must never hold data
    if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
        non_free_sids.add(RANGELOCKSECT)
        logging.debug("adding range lock")
        self.fat[RANGELOCKSECT] = ENDOFCHAIN
    freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
    self.fat_freelist.extend(freelist)
    self.fat[new_fat_sect] = FATSECT
    self.fat_sector_count += 1
    if not new_difat_sect is None:
        self.fat[new_difat_sect] = DIFSECT
    return self.next_free_sect()
def read_sector_data(self, sid):
    """Return the raw bytes of sector *sid*, going through the sector cache.

    A sector that does not exist, or is truncated, comes back zero-padded
    to a full sector, which is the behaviour callers rely on.
    """
    cached = self.sector_cache.get(sid, None)
    if cached is not None:
        return cached
    buf = bytearray(self.sector_size)
    self.f.seek((sid + 1) * self.sector_size)
    # a short read simply leaves the tail of buf zeroed
    self.f.readinto(buf)
    self.sector_cache[sid] = buf
    return buf
def get_sid_offset(self, abs_pos):
    """Map an absolute file position to ``(sid, offset_within_sector)``.

    Sector ids are shifted down by one because sector 0 begins immediately
    after the header sector.
    """
    quotient = abs_pos // self.sector_size
    remainder = abs_pos - quotient * self.sector_size
    return quotient - 1, remainder
def dir_entry_sid_offset(self, dir_id):
    """Locate directory entry *dir_id* (128 bytes each) in the directory
    stream and return ``(sid, offset_within_sector)``."""
    offset_in_stream = dir_id * 128
    sid = self.dir_fat_chain[offset_in_stream // self.sector_size]
    in_sector = offset_in_stream % self.sector_size
    return sid, in_sector
def dir_entry_pos(self, dir_id):
    """Return the absolute file offset of directory entry *dir_id*."""
    sid, offset = self.dir_entry_sid_offset(dir_id)
    return (sid + 1) * self.sector_size + offset
def read_dir_entry(self, dir_id, parent = None):
    """Read directory entry *dir_id* (128 bytes) from the directory
    stream, using the per-id cache.

    Returns:
        DirEntry or None: None when dir_id is None (no entry).
    """
    if dir_id is None:
        return None
    entry = self.dir_cache.get(dir_id, None)
    if entry is not None:
        return entry
    stream_pos = dir_id * 128
    chain_index = stream_pos // self.sector_size
    sid_offset = stream_pos % self.sector_size
    sid = self.dir_fat_chain[chain_index]
    sector_data = self.read_sector_data(sid)
    # copy the 128-byte slice so the entry owns its data
    data= bytearray(sector_data[sid_offset:sid_offset+128])
    entry = DirEntry(self, dir_id, data=data)
    entry.parent = parent
    self.dir_cache[dir_id] = entry
    return entry
def clear_sector(self, sid):
    """Overwrite sector *sid* with zero bytes."""
    self.f.seek((sid + 1) * self.sector_size)
    self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
    """Return a free directory-entry id, extending the directory stream by
    one sector when the freelist is exhausted."""
    # use free list first
    if self.dir_freelist:
        return self.dir_freelist.pop(0)
    f = self.f
    sect = self.fat_chain_append(self.dir_fat_chain[-1])
    self.dir_fat_chain.append(sect)
    self.dir_sector_count += 1
    # each sector holds sector_size // 128 directory entries
    first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
    last_dir_id = first_dir_id + (self.sector_size // 128)
    self.dir_freelist.extend(range(first_dir_id, last_dir_id))
    return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
    """Follow a (mini)FAT chain from *start_sid* and return the list of sids.

    Returns [] for None or any special sector marker.  Uses Floyd's
    tortoise-and-hare cycle detection so a corrupt, cyclic chain raises
    instead of looping forever.

    Raises:
        CompoundFileBinaryError: if the chain is cyclic.
    """
    fat = self.fat
    fat_name = "FAT"
    if minifat:
        fat = self.minifat
        fat_name = "MINIFAT"
    # Floyd's Tortoise and Hare cycle-finding algorithm:
    # b (tortoise) walks one step per loop and collects sids,
    # a (hare) advances up to two steps per loop
    a = start_sid
    b = start_sid
    sectors = []
    if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
        return []
    while b != ENDOFCHAIN:
        sectors.append(b)
        b = fat[b]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a == b:
            raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
    return sectors
def mini_stream_grow(self):
    """Append one regular sector to the mini stream's FAT chain, starting
    the chain (and pointing the root entry at it) if it doesn't exist yet."""
    sid = self.next_free_sect()
    if not self.mini_stream_chain:
        # first sector: the root entry's sector_id is the chain head
        self.mini_stream_chain = [sid]
        self.root.sector_id = sid
    else:
        self.fat[self.mini_stream_chain[-1]] = sid
        self.mini_stream_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
    """Allocate a new (mini)sector and link it to the end of the chain
    starting at *start_sid*, or start a new chain when start_sid is None.

    Returns:
        int: sid of the newly allocated sector.
    """
    if minifat:
        sect = self.next_free_minifat_sect()
        fat = self.minifat
    else:
        sect = self.next_free_sect()
        fat = self.fat
    if start_sid is None:
        # new single-sector chain
        fat[sect] = ENDOFCHAIN
    else:
        fat_chain = self.get_fat_chain(start_sid, minifat)
        assert fat_chain
        fat[fat_chain[-1]] = sect
        fat[sect] = ENDOFCHAIN
    return sect
def free_fat_chain(self, start_sid, minifat=False):
    """Mark every sector in the chain starting at *start_sid* as free and
    push each sid onto the matching freelist (FAT or miniFAT)."""
    table = self.minifat if minifat else self.fat
    freelist = self.minifat_freelist if minifat else self.fat_freelist
    for sid in self.get_fat_chain(start_sid, minifat):
        table[sid] = FREESECT
        freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
    """Create a new DirEntry of *dir_type* at *path* and attach it to its
    parent storage.

    Raises:
        ValueError: if path already exists, its parent is missing, or the
            parent is not a storage type.
    """
    if self.exists(path):
        raise ValueError("%s already exists" % path)
    dirname = os.path.dirname(path)
    basename = os.path.basename(path)
    root = self.find(dirname)
    if root is None:
        raise ValueError("parent dirname does not exist: %s" % dirname)
    if not root.type in ('storage', 'root storage'):
        raise ValueError("can not add entry to non storage type")
    dir_id = self.next_free_dir_id()
    logging.debug("next dir id %d" % dir_id)
    entry = DirEntry(self, dir_id)
    entry.name = basename
    entry.type = dir_type
    entry.class_id = class_id
    # TODO: Implement a Red Black tree
    # all new DirEntries are black, so there is no tree balancing.
    # AAF Low-Level Container Specification says its alright to do this.
    entry.color = 'black'
    root.add_child(entry)
    self.dir_cache[dir_id] = entry
    return entry
def free_dir_entry(self, entry):
    """Recycle *entry*'s dir_id onto the freelist, purge it from every
    cache, and mark the entry as detached (dir_id = None)."""
    self.dir_freelist.append(entry.dir_id)
    for cache in (self.dir_cache, self.children_cache, self.modified):
        if entry.dir_id in cache:
            del cache[entry.dir_id]
    entry.dir_id = None
def remove(self, path):
    """
    Removes both streams and storage DirEntry types from file.
    storage type entries need to be empty dirs.

    Raises:
        ValueError: if path doesn't exist, is the root entry, or is a
            non-empty storage.
    """
    entry = self.find(path)
    if not entry:
        raise ValueError("%s does not exists" % path)
    if entry.type == 'root storage':
        # fixed garbled error message ("can no remove")
        raise ValueError("can not remove root entry")
    if entry.type == "storage" and not entry.child_id is None:
        raise ValueError("storage contains children")
    # detach from the parent's sibling tree
    entry.pop()
    # remove stream data; streams smaller than min_stream_max_size live in
    # the mini stream, so the second argument selects the right FAT
    if entry.type == "stream":
        self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
    self.free_dir_entry(entry)
def rmtree(self, path):
    """
    Removes directory structure, similar to shutil.rmtree.

    Walks bottom-up so containers are emptied (streams' data freed,
    entries recycled) before their parents, then removes the now-empty
    root item itself.
    """
    for root, storage, streams in self.walk(path, topdown=False):
        for item in streams:
            # mini-stream chains are freed from the miniFAT
            self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
            self.free_dir_entry(item)
        for item in storage:
            self.free_dir_entry(item)
        root.child_id = None
    # remove root item
    self.remove(path)
def listdir(self, path = None):
    """
    Return a list containing the ``DirEntry`` objects in the directory
    given by path.
    """
    result = self.listdir_dict(path)
    # wrap in list() so the return type matches the documented contract
    # (previously a dict_values view was returned)
    return list(result.values())
def listdir_dict(self, path = None):
    """
    Return a dict containing the ``DirEntry`` objects in the directory
    given by path with name of the dir as key.

    Results are cached per dir_id.  The sibling tree is walked iteratively
    with a visit counter that guards against corrupt (cyclic) trees.

    Raises:
        ValueError: if path can't be found or is not a storage type.
        CompoundFileBinaryError: if the sibling tree is corrupt.
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if root is None:
        raise ValueError("unable to find dir: %s" % str(path))
    if not root.isdir():
        raise ValueError("can only list storage types")
    children = self.children_cache.get(root.dir_id, None)
    if children is not None:
        return children
    child = root.child()
    result = {}
    if not child:
        self.children_cache[root.dir_id] = result
        return result
    # more visits than the directory stream can hold implies a cycle
    dir_per_sector = self.sector_size // 128
    max_dirs_entries = self.dir_sector_count * dir_per_sector
    stack = deque([child])
    count = 0
    while stack:
        current = stack.pop()
        result[current.name] = current
        count += 1
        if count > max_dirs_entries:
            raise CompoundFileBinaryError("corrupt folder structure")
        left = current.left()
        if left:
            stack.append(left)
        right = current.right()
        if right:
            stack.append(right)
    self.children_cache[root.dir_id] = result
    return result
def find(self, path):
    """
    find a ``DirEntry`` located at *path*. Returns ``None`` if path
    does not exist.  *path* may also already be a DirEntry, in which
    case it is returned unchanged.
    """
    if isinstance(path, DirEntry):
        return path
    if path == "/":
        return self.root
    split_path = path.lstrip('/').split("/")
    i = 0
    root = self.root
    # descend one path component at a time
    while True:
        children = self.listdir_dict(root)
        match = children.get(split_path[i], None)
        if match:
            if i == len(split_path) - 1:
                return match
            root = match
            i += 1
        else:
            return None
def walk(self, path = None, topdown=True):
    """
    Similar to :func:`os.walk`, yields a 3-tuple ``(root, storage_items, stream_items)``.

    With ``topdown=False`` children are yielded before their parent
    (rmtree relies on this to empty containers first).

    Raises:
        ValueError: if path is not a storage type.
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if not root.isdir():
        raise ValueError("can only walk storage types")
    if not root.child_id:
        return
    if topdown:
        storage_items = []
        stream_items = []
        for item in self.listdir(root):
            if item.isdir():
                storage_items.append(item)
            else:
                stream_items.append(item)
        yield root, storage_items, stream_items
        for item in storage_items:
            for root, storage_items, stream_items in self.walk(item):
                yield root, storage_items, stream_items
    else:
        def topdown_visit_node(root):
            # despite the name, this recursion yields children first (bottom-up)
            storage_items = []
            stream_items = []
            for item in self.listdir(root):
                if item.isdir():
                    for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
                        yield sub_root, sub_storage, sub_stream
                    storage_items.append(item)
                else:
                    stream_items.append(item)
            yield root, storage_items, stream_items
        for root_item, storage, stream in topdown_visit_node(root):
            yield root_item, storage, stream
def exists(self, path):
    """Return ``True`` if *path* refers to an existing DirEntry."""
    return self.find(path) is not None
def makedir(self, path, class_id=None):
    """Create and return a new 'storage' DirEntry at *path*.

    Thin convenience wrapper around :meth:`create_dir_entry`.
    """
    return self.create_dir_entry(path, dir_type='storage', class_id=class_id)
def makedirs(self, path):
    """
    Recursive storage DirEntry creation function.

    *path* must be absolute (leading '/'); each missing component is
    created in turn and the final DirEntry is returned.
    NOTE(review): validation uses `assert`, which is stripped under -O;
    consider raising ValueError instead.
    """
    root = ""
    assert path.startswith('/')
    p = path.strip('/')
    for item in p.split('/'):
        root += "/" + item
        if not self.exists(root):
            self.makedir(root)
    return self.find(path)
def open(self, path, mode='r'):
    """Open stream, returning ``Stream`` object.

    mode 'r'  - stream must already exist;
    mode 'w'  - create the stream, truncating it if it exists;
    mode 'rw' - open (creating if missing) without truncating.

    Raises:
        ValueError: for a missing stream in 'r' mode, or a non-stream entry.
    """
    entry = self.find(path)
    if entry is None:
        if mode == 'r':
            raise ValueError("stream does not exists: %s" % path)
        entry = self.create_dir_entry(path, 'stream', None)
    else:
        if not entry.isfile():
            raise ValueError("can only open stream type DirEntry's")
        if mode == 'w':
            logging.debug("stream: %s exists, overwriting" % path)
            # small streams live in the mini stream; the size test selects
            # which FAT the old chain is freed from
            self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
            entry.sector_id = None
            entry.byte_size = 0
            entry.class_id = None
        elif mode == 'rw':
            pass
    s = Stream(self, entry, mode)
    return s
|
markreidvfx/pyaaf2
|
aaf2/cfb.py
|
CompoundFileBinary.open
|
python
|
def open(self, path, mode='r'):
entry = self.find(path)
if entry is None:
if mode == 'r':
raise ValueError("stream does not exists: %s" % path)
entry = self.create_dir_entry(path, 'stream', None)
else:
if not entry.isfile():
raise ValueError("can only open stream type DirEntry's")
if mode == 'w':
logging.debug("stream: %s exists, overwriting" % path)
self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
entry.sector_id = None
entry.byte_size = 0
entry.class_id = None
elif mode == 'rw':
pass
s = Stream(self, entry, mode)
return s
|
Open stream, returning ``Stream`` object
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/cfb.py#L1864-L1887
|
[
"def free_fat_chain(self, start_sid, minifat=False):\n fat =self.fat\n if minifat:\n fat = self.minifat\n\n for sid in self.get_fat_chain(start_sid, minifat):\n fat[sid] = FREESECT\n if minifat:\n self.minifat_freelist.insert(0, sid)\n else:\n self.fat_freelist.insert(0, sid)\n",
"def create_dir_entry(self, path, dir_type='storage', class_id=None):\n\n if self.exists(path):\n raise ValueError(\"%s already exists\" % path)\n\n dirname = os.path.dirname(path)\n basename = os.path.basename(path)\n\n root = self.find(dirname)\n\n if root is None:\n raise ValueError(\"parent dirname does not exist: %s\" % dirname)\n\n if not root.type in ('storage', 'root storage'):\n raise ValueError(\"can not add entry to non storage type\")\n\n dir_id = self.next_free_dir_id()\n logging.debug(\"next dir id %d\" % dir_id)\n\n entry = DirEntry(self, dir_id)\n entry.name = basename\n entry.type = dir_type\n entry.class_id = class_id\n\n # TODO: Implement a Red Black tree\n # all new DirEntries are black, so there is no tree balancing.\n # AAF Low-Level Container Specification says its alright to do this.\n\n entry.color = 'black'\n\n root.add_child(entry)\n self.dir_cache[dir_id] = entry\n\n return entry\n",
"def find(self, path):\n \"\"\"\n find a ``DirEntry`` located at *path*. Returns ``None`` if path\n does not exist.\n \"\"\"\n\n if isinstance(path, DirEntry):\n return path\n\n if path == \"/\":\n return self.root\n\n split_path = path.lstrip('/').split(\"/\")\n\n i = 0\n root = self.root\n\n while True:\n\n children = self.listdir_dict(root)\n match = children.get(split_path[i], None)\n\n if match:\n if i == len(split_path) - 1:\n return match\n root = match\n i += 1\n else:\n return None\n"
] |
class CompoundFileBinary(object):
def __init__(self, file_object, mode='rb', sector_size=4096):
self.f = file_object
self.difat = [[]]
self.fat = array(str('I'))
self.fat_freelist = []
self.minifat = array(str('I'))
self.minifat_freelist = []
self.difat_chain = []
self.minifat_chain = []
self.dir_fat_chain = []
self.mini_stream_chain = []
self.modified = {}
self.sector_cache = LRUCacheDict()
self.dir_cache = weakref.WeakValueDictionary()
self.children_cache = LRUCacheDict()
self.dir_freelist = []
self.debug_grow = False
self.is_open = True
if isinstance(self.f, BytesIO):
self.mode = 'wb+'
else:
self.mode = mode
if self.mode in ("r", "r+", "rb", 'rb+'):
self.read_header()
self.read_fat()
mini_stream_byte_size = self.read_minifat()
# create dir_fat_chain and read root dir entry
self.dir_fat_chain = self.get_fat_chain(self.dir_sector_start)
if len(self.dir_fat_chain) != self.dir_sector_count:
logging.info("read dir_sector_count missmatch, using fat chain length")
self.dir_sector_count = len(self.dir_fat_chain)
logging.debug("read %d dir sectors" % len(self.dir_fat_chain))
self.root = self.read_dir_entry(0)
self.dir_cache[0] = self.root
# create mini stream fat chain
if self.minifat_sector_count:
self.mini_stream_chain = self.get_fat_chain(self.root.sector_id)
if self.root.sector_id is not None and mini_stream_byte_size != self.root.byte_size:
message = "mini stream size missmatch: %d != %d, using size from minifat"
logging.warn(message % (self.root.byte_size, mini_stream_byte_size))
else:
self.setup_empty(sector_size)
self.write_header()
logging.debug("pos: %d" % self.f.tell())
logging.debug("writing root dir sector")
self.root.write()
self.f.write(bytearray(self.sector_size - 128))
self.write_fat()
def close(self):
if self.mode in ("r", "rb"):
return
# caculate mini stream size
if self.root.sector_id is not None:
# I cannot find this documented anywhere but the size of the mini stream
# is the size up to the last mini sector is uses. Not the total Non FREESECT's.
# If self.root.byte_size is not set correctly the some appications will crash hard...
# find last non-free sect
for i,v in enumerate(reversed(self.minifat)):
if v != FREESECT:
break
last_used_sector_id = len(self.minifat) - i
mini_stream_byte_size = (last_used_sector_id * self.mini_stream_sector_size)
self.root.byte_size = mini_stream_byte_size
# Truncate ministream
s = Stream(self, self.root, 'rw')
s.truncate(mini_stream_byte_size)
self.write_header()
self.write_difat()
self.write_fat()
self.write_minifat()
self.write_dir_entries()
# Truncate file to the last free sector
for i,v in enumerate(reversed(self.fat)):
if v != FREESECT:
break
last_used_sector_id = len(self.fat) - i
pos = (last_used_sector_id + 1) * self.sector_size
self.f.seek(pos)
self.f.truncate()
self.is_open = False
def setup_empty(self, sector_size):
if sector_size == 4096:
self.class_id = auid.AUID("0d010201-0200-0000-060e-2b3403020101")
elif sector_size == 512:
self.class_id = auid.AUID("42464141-000d-4d4f-060e-2b34010101ff")
else:
raise ValueError("sector size must be 4096 or 512")
self.major_version = 4
self.minor_version = 62
self.byte_order = "le"
self.sector_size = sector_size
self.mini_stream_sector_size = 64
self.dir_sector_count = 1
self.fat_sector_count = 1
self.dir_sector_start = 0
self.transaction_signature = 1
self.min_stream_max_size = 4096
self.minifat_sector_start = FREESECT
self.minifat_sector_count = 0
self.difat_sector_start = FREESECT
self.difat_sector_count = 0
self.difat = [[]]
for i in range(109):
self.difat[0].append(FREESECT)
self.difat[0][0] = 1
for i in range(self.sector_size // 4):
self.fat.append(FREESECT)
if i > 1:
self.fat_freelist.append(i)
self.fat[0] = ENDOFCHAIN # end of dir chain
self.fat[self.difat[0][0]] = FATSECT
self.root = DirEntry(self, 0)
self.root.name = 'Root Entry'
self.root.sector_id = None
self.root.type = 'root storage'
self.root.class_id = auid.AUID("b3b398a5-1c90-11d4-8053-080036210804")
self.dir_cache[0] = self.root
self.dir_fat_chain = [0]
# raise NotImplementedError("mode: %s supported not implemented" % self.f.mode)
def write_header(self):
logging.debug("writiing header")
f = self.f
f.seek(0)
f.write(b'\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1') # Magic
f.write(self.class_id.bytes_le)
write_u16le(f, self.minor_version)
write_u16le(f, self.major_version)
write_u16le(f, 0xFFFE) #byte order le
write_u16le(f, int(math.log(self.sector_size, 2)))
write_u16le(f, int(math.log(self.mini_stream_sector_size, 2)))
f.write(b'\0' * 6) #skip reseverd
write_u32le(f, self.dir_sector_count)
write_u32le(f, self.fat_sector_count)
write_u32le(f, self.dir_sector_start)
write_u32le(f, self.transaction_signature)
write_u32le(f, self.min_stream_max_size)
write_u32le(f, self.minifat_sector_start)
write_u32le(f, self.minifat_sector_count)
write_u32le(f, self.difat_sector_start)
write_u32le(f, self.difat_sector_count)
for i in range(109):
write_u32le(f, self.difat[0][i])
for i in range(self.sector_size - f.tell()):
f.write(b'\0')
def read_header(self):
f = self.f
f.seek(0)
magic = f.read(8)
# logging.debug("magic: %s" % magic.encode("hex"))
logging.debug("magic: %s" % str([magic]))
# clsid = f.read(16)
# logging.debug("clsid: %s" % clsid.encode("hex"))
self.class_id = auid.AUID(bytes_le=f.read(16))
logging.debug("clsid: %s" % str(self.class_id))
self.minor_version = read_u16le(f)
logging.debug("minor_version: %d" % self.minor_version)
self.major_version = read_u16le(f)
logging.debug("major_version: %d" % self.major_version)
byte_order = read_u16le(f)
if byte_order == 0xFFFE:
self.byte_order = 'le'
else:
raise NotImplementedError("endian format:0x%X not supported" % byte_order)
logging.debug("byte_order: %s" % self.byte_order)
size = read_u16le(f)
self.sector_size = pow(2, size)
logging.debug("sector_size: %d -> %d" % (size, self.sector_size))
size = read_u16le(f)
self.mini_stream_sector_size = pow(2, size)
logging.debug("mini_stream_sector_size: %d -> %d" % (size, self.mini_stream_sector_size))
if not self.sector_size in (4096, 512):
raise NotImplementedError("unsupported sector size: %d" % self.sector_size)
if self.mini_stream_sector_size != 64:
raise NotImplementedError("unsupported mini sector size: %d" % self.mini_stream_sector_size)
f.read(6) #skip reseverd
self.dir_sector_count = read_u32le(f)
logging.debug("dir_sector_count: %d" % self.dir_sector_count)
self.fat_sector_count = read_u32le(f)
logging.debug("fat_sector_count: %d" % self.fat_sector_count)
self.dir_sector_start = read_u32le(f)
logging.debug("dir_sector_start: %d" % self.dir_sector_start)
self.transaction_signature = read_u32le(f)
logging.debug("transaction_signature: %d" % self.transaction_signature)
self.min_stream_max_size = read_u32le(f)
logging.debug("min_stream_max_size: %d" % self.min_stream_max_size)
self.minifat_sector_start = read_u32le(f)
logging.debug("minifat_sector_start: %d" % self.minifat_sector_start)
self.minifat_sector_count = read_u32le(f)
logging.debug("minifat_sector_count: %d" % self.minifat_sector_count)
self.difat_sector_start = read_u32le(f)
logging.debug("difat_sector_start: %d" % self.difat_sector_start)
self.difat_sector_count = read_u32le(f)
logging.debug("difat_sector_count: %d" % self.difat_sector_count)
self.difat = [[]]
logging.debug("reading header difat at %d" % f.tell())
for i in range(109):
item = read_u32le(f)
# item = fat_sector_types.get(item, item)
self.difat[0].append(item)
sectors_left = self.difat_sector_count
sid = self.difat_sector_start
# reading difat sectors
while sectors_left:
logging.debug("reading difat sid: %d", sid)
sector_type = fat_sector_types.get(sid, sid)
if not isinstance(sector_type, int):
break
self.difat_chain.append(sid)
f.seek((sid + 1) * self.sector_size)
difat = []
for i in range( (self.sector_size // 4)):
item = read_u32le(f)
difat.append(item)
self.difat.append(difat)
sid = difat[-1]
logging.debug("next difat: %d" % sid)
sectors_left -= 1
def iter_difat(self):
    """Yield (table_index, entry_index, sid) for every DIFAT entry.

    Table 0 is the 109-entry DIFAT embedded in the header.  For every
    additional DIFAT sector table the final entry is skipped, because it
    is the chain pointer to the next DIFAT sector, not a FAT sector id.
    """
    for entry_index, sid in enumerate(self.difat[0]):
        yield 0, entry_index, sid
    for table_index, table in enumerate(self.difat[1:], start=1):
        for entry_index, sid in enumerate(table[:-1]):
            yield table_index, entry_index, sid
def write_difat(self):
    """Write the DIFAT back to disk: the 109 header entries at file
    offset 76, then each additional DIFAT sector table in chain order.

    Raises IOError if a chained DIFAT sid resolves to a sentinel value
    (FREESECT/ENDOFCHAIN/etc.) instead of a real sector id.
    """
    f = self.f
    # write header entries
    f.seek(76)
    logging.debug("writing header difat")
    for i in range(109):
        write_u32le(f, self.difat[0][i])
    # zero-pad the remainder of the header sector
    for i in range(self.sector_size - f.tell()):
        f.write(b'\0')
    if self.difat_sector_count == 0:
        return
    sid = self.difat_sector_start
    assert len(self.difat[1:]) == self.difat_sector_count
    for table in self.difat[1:]:
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            raise IOError("bad difat sector type")
        pos = (sid + 1) * self.sector_size
        logging.debug("writing difat to sid: %d at: %d" % (sid,pos))
        f.seek(pos)
        for i in range(self.sector_size // 4):
            write_u32le(f, table[i])
        # the last entry of each table is the sid of the next DIFAT sector
        sid = table[-1]
def read_fat(self):
    """Read every FAT sector listed in the DIFAT into ``self.fat`` and
    build the FAT free list.

    If the DIFAT yields a different number of FAT sectors than the
    header claims, the header count is corrected to match the DIFAT.
    """
    f = self.f
    self.fat = array(str('I'))
    sector_count = 0
    fat_sectors = []
    for t, i, sid in self.iter_difat():
        sector_type = fat_sector_types.get(sid, sid)
        # skip sentinel entries (FREESECT etc.) in the DIFAT
        if not isinstance(sector_type, int):
            continue
        fat_sectors.append(sid)
    if len(fat_sectors) != self.fat_sector_count:
        logging.warn("fat sector count missmatch difat: %d header: %d" % (len(fat_sectors), self.fat_sector_count))
        self.fat_sector_count = len(fat_sectors)
    for sid in fat_sectors:
        pos = (sid + 1) * self.sector_size
        f.seek(pos)
        extend_sid_table(f, self.fat, self.sector_size)
        sector_count += 1
    # table was read as little-endian raw bytes; swap on big-endian hosts
    if sys.byteorder == 'big':
        self.fat.byteswap()
    for i,v in enumerate(self.fat):
        if v == FREESECT:
            self.fat_freelist.append(i)
    logging.debug("read %d fat sectors ", sector_count)
    # the range lock sector (offsets 0x7FFFFF00-0x7FFFFFFF) must not hold data
    if self.sector_size == 4096 and len(self.fat) > RANGELOCKSECT:
        if self.fat[RANGELOCKSECT] != ENDOFCHAIN:
            logging.warn("range lock sector has data")
def write_fat(self):
    """Write ``self.fat`` back into the FAT sectors listed by the DIFAT.

    Asserts that the in-memory FAT is sector-aligned and that the DIFAT
    has exactly enough entries to hold it.
    """
    logging.debug("writing fat")
    f = self.f
    sector_count = 0
    assert len(self.fat)*4 % self.sector_size == 0
    fat_sectors = []
    for t, i, sid in self.iter_difat():
        sector_type = fat_sector_types.get(sid, sid)
        if not isinstance(sector_type, int):
            continue
        fat_sectors.append(sid)
    # check that the difat has enough entries to hold the current fat
    assert len(fat_sectors) == len(self.fat)*4 // self.sector_size
    element_count = self.sector_size // 4
    # pre-compiled struct packs one whole sector per write
    fat_table_struct = Struct(str('<%dI' % element_count))
    for i, sid in enumerate(fat_sectors):
        f.seek((sid + 1) * self.sector_size)
        start = i * element_count
        end = start + element_count
        f.write(fat_table_struct.pack(*self.fat[start:end]))
def read_minifat(self):
    """Read the MiniFAT chain into ``self.minifat`` and build its free
    list.

    Returns the byte size of the mini stream implied by the highest used
    MiniFAT slot.  NOTE(review): when the MiniFAT is empty this returns
    one mini sector (64 bytes) rather than 0 because last_used_sector
    starts at 0 — confirm callers tolerate that.
    """
    f = self.f
    sector_count = 0
    self.minifat = array(str('I'))
    for sid in self.get_fat_chain(self.minifat_sector_start):
        self.minifat_chain.append(sid)
        f.seek((sid + 1) * self.sector_size)
        extend_sid_table(f, self.minifat, self.sector_size)
        sector_count += 1
    # raw little-endian table; swap on big-endian hosts
    if sys.byteorder == 'big':
        self.minifat.byteswap()
    last_used_sector = 0
    for i,v in enumerate(self.minifat):
        if v == FREESECT:
            self.minifat_freelist.append(i)
        else:
            last_used_sector = i
    mini_stream_byte_size = ((last_used_sector+1) * self.mini_stream_sector_size)
    logging.debug("read %d mini fat sectors", sector_count)
    return mini_stream_byte_size
def write_minifat(self):
    """Write ``self.minifat`` back into the MiniFAT sector chain, one
    whole sector per write."""
    f = self.f
    sector_count = 0
    element_count = self.sector_size // 4
    fat_table_struct = Struct(str('<%dI' % element_count))
    for i, sid in enumerate(self.get_fat_chain(self.minifat_sector_start)):
        pos = (sid + 1) * self.sector_size
        f.seek(pos)
        start = i * element_count
        end = start + element_count
        f.write(fat_table_struct.pack(*self.minifat[start:end]))
def write_modified_dir_entries(self):
    """Flush every dirty DirEntry in ``self.modified`` to its 128-byte
    slot in the directory stream, then clear the modified set."""
    f = self.f
    for dir_id in sorted(self.modified):
        entry = self.modified[dir_id]
        # locate the 128-byte slot within the directory sector chain
        stream_pos = entry.dir_id * 128
        chain_index = stream_pos // self.sector_size
        sid_offset = stream_pos % self.sector_size
        sid = self.dir_fat_chain[chain_index]
        pos = ((sid + 1) * self.sector_size) + sid_offset
        f.seek(pos)
        f.write(entry.data)
        # invalidate cached sector so later reads see the new bytes
        if sid in self.sector_cache:
            del self.sector_cache[sid]
    self.modified = {}
def write_dir_entries(self):
    """Flush modified DirEntries, then zero out every slot on the
    directory free list so deleted entries don't linger on disk."""
    self.write_modified_dir_entries()
    # clear empty DirEntrys
    empty_dir = bytearray(128)
    f = self.f
    self.dir_freelist.sort()
    for dir_id in self.dir_freelist:
        stream_pos = dir_id * 128
        chain_index = stream_pos // self.sector_size
        sid_offset = stream_pos % self.sector_size
        sid = self.dir_fat_chain[chain_index]
        pos = ((sid + 1) * self.sector_size) + sid_offset
        f.seek(pos)
        f.write(empty_dir)
def next_free_minifat_sect(self):
    """Return a free MiniFAT slot index, growing the mini stream and/or
    the MiniFAT itself (recursively) when needed."""
    idx_per_sect = self.sector_size // self.mini_stream_sector_size
    stream_sects = len(self.mini_stream_chain) * idx_per_sect
    if self.minifat_freelist:
        i = self.minifat_freelist.pop(0)
        assert self.minifat[i] == FREESECT
        # slot lies beyond the current mini stream: extend it first
        if i+1 > stream_sects:
            self.mini_stream_grow()
        return i
    # freelist empty: allocate a regular sector to hold more MiniFAT entries
    sid = self.next_free_sect()
    idx_start = len(self.minifat)
    idx_end = idx_start + self.sector_size // 4
    self.minifat.extend([FREESECT for i in range(idx_start, idx_end)])
    self.minifat_freelist.extend([i for i in range(idx_start, idx_end)])
    if self.minifat_sector_count == 0:
        self.minifat_sector_count = 1
        self.minifat_sector_start = sid
    else:
        self.minifat_sector_count += 1
        # chain the new MiniFAT sector after the previous one
        self.fat[self.minifat_chain[-1]] = sid
    self.minifat_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
    # recurse: the freelist is now guaranteed non-empty
    return self.next_free_minifat_sect()
def next_free_sect(self):
    """Return the sid of a free FAT sector, growing the FAT (and, when
    the DIFAT has no free slot left, the DIFAT) as required.

    The range lock sector (4096-byte files only) is never handed out; it
    is marked ENDOFCHAIN and skipped.
    """
    if self.fat_freelist:
        i = self.fat_freelist.pop(0)
        assert self.fat[i] == FREESECT
        # Handle Range Lock Sector
        if i == RANGELOCKSECT and self.sector_size == 4096:
            self.fat[i] = ENDOFCHAIN
            logging.warning("range lock sector in fat freelist, marking ENDOFCHAIN")
            return self.next_free_sect()
        return i
    # FAT is full; find a free DIFAT slot for one more FAT sector
    difat_table = None
    difat_index = None
    for t, i, v in self.iter_difat():
        if v == FREESECT:
            difat_table = t
            difat_index = i
            break
    new_difat_sect = None
    if difat_index is None:
        # no free DIFAT slot: add a whole new DIFAT sector first
        new_difat_sect = len(self.fat) + 1
        logging.debug("adding new difat to sid: %d" % new_difat_sect)
        if self.difat_sector_count == 0:
            self.difat_sector_start = new_difat_sect
            self.difat_sector_count = 1
        else:
            # chain the previous DIFAT sector's last entry to the new one
            self.difat[-1][-1] = new_difat_sect
            self.difat_sector_count += 1
        # add difat table
        difat = []
        for i in range(self.sector_size // 4):
            difat.append(FREESECT)
        # BUGFIX: was `difat[-1] == ENDOFCHAIN` (a no-op comparison).
        # Per MS-CFB the last entry of a DIFAT sector is the next-DIFAT
        # pointer and must be set to ENDOFCHAIN for the tail sector.
        difat[-1] = ENDOFCHAIN
        self.difat.append(difat)
        for t, i, v in self.iter_difat():
            if v == FREESECT:
                difat_table = t
                difat_index = i
                break
    new_fat_sect = len(self.fat)
    self.difat[difat_table][difat_index] = new_fat_sect
    # grow fat entries
    idx_start = len(self.fat)
    idx_end = idx_start + (self.sector_size // 4)
    self.fat.extend([FREESECT for i in range(self.sector_size // 4)])
    non_free_sids = set([new_fat_sect, new_difat_sect])
    # Handle Range Lock Sector
    # The range lock sector is the sector
    # that covers file offsets 0x7FFFFF00-0x7FFFFFFF in the file
    if RANGELOCKSECT < idx_end and RANGELOCKSECT > idx_start and self.sector_size == 4096:
        non_free_sids.add(RANGELOCKSECT)
        logging.debug("adding range lock")
        self.fat[RANGELOCKSECT] = ENDOFCHAIN
    freelist = [i for i in range(idx_start, idx_end) if i not in non_free_sids]
    self.fat_freelist.extend(freelist)
    self.fat[new_fat_sect] = FATSECT
    self.fat_sector_count += 1
    if not new_difat_sect is None:
        self.fat[new_difat_sect] = DIFSECT
    # recurse: the freelist is now guaranteed non-empty
    return self.next_free_sect()
def read_sector_data(self, sid):
    """Return the raw bytes of sector *sid*, served from the sector
    cache when possible; short/absent sectors come back zero-padded."""
    sector_data = self.sector_cache.get(sid, None)
    if sector_data is not None:
        return sector_data
    else:
        pos = (sid + 1) * self.sector_size
        self.f.seek(pos)
        sector_data = bytearray(self.sector_size)
        #NOTE: if requested sector doesn't exsit or
        # is truncated will padd with zeros, expected behavour
        bytes_read = self.f.readinto(sector_data)
        self.sector_cache[sid] = sector_data
        return sector_data
def get_sid_offset(self, abs_pos):
    """Translate an absolute file position into (sid, offset-in-sector).

    The first ``sector_size`` bytes of the file hold the header, so the
    computed sector index is shifted down by one.
    """
    header_relative, offset = divmod(abs_pos, self.sector_size)
    return header_relative - 1, offset
def dir_entry_sid_offset(self, dir_id):
    """Return (sid, byte offset) of the 128-byte directory entry
    *dir_id* within the directory sector chain."""
    entry_pos = dir_id * 128
    chain_index, offset = divmod(entry_pos, self.sector_size)
    return self.dir_fat_chain[chain_index], offset
def dir_entry_pos(self, dir_id):
    """Return the absolute file position of directory entry *dir_id*."""
    sid, offset = self.dir_entry_sid_offset(dir_id)
    return (sid + 1) * self.sector_size + offset
def read_dir_entry(self, dir_id, parent = None):
    """Read and cache the DirEntry with id *dir_id*; returns None for a
    None id.  *parent* is attached to the new entry when given."""
    if dir_id is None:
        return None
    entry = self.dir_cache.get(dir_id, None)
    if entry is not None:
        return entry
    # locate the 128-byte slot within the directory sector chain
    stream_pos = dir_id * 128
    chain_index = stream_pos // self.sector_size
    sid_offset = stream_pos % self.sector_size
    sid = self.dir_fat_chain[chain_index]
    sector_data = self.read_sector_data(sid)
    # copy the slice so the entry owns its bytes independently of the cache
    data= bytearray(sector_data[sid_offset:sid_offset+128])
    entry = DirEntry(self, dir_id, data=data)
    entry.parent = parent
    self.dir_cache[dir_id] = entry
    return entry
def clear_sector(self, sid):
    """Overwrite sector *sid* with zero bytes."""
    self.f.seek((sid + 1) * self.sector_size)
    self.f.write(bytearray(self.sector_size))
def next_free_dir_id(self):
    """Return a free DirEntry id, appending a new directory sector (and
    refilling the free list) when none is available."""
    # use free list first
    if self.dir_freelist:
        return self.dir_freelist.pop(0)
    # BUGFIX(cleanup): removed unused local `f = self.f`
    sect = self.fat_chain_append(self.dir_fat_chain[-1])
    self.dir_fat_chain.append(sect)
    self.dir_sector_count += 1
    # every new directory sector contributes sector_size/128 entry slots
    first_dir_id = (len(self.dir_fat_chain) - 1) * self.sector_size // 128
    last_dir_id = first_dir_id + (self.sector_size // 128)
    self.dir_freelist.extend(range(first_dir_id, last_dir_id))
    # recurse: the freelist is now guaranteed non-empty
    return self.next_free_dir_id()
def get_fat_chain(self, start_sid, minifat=False):
    """Follow a FAT (or MiniFAT) chain from *start_sid* and return the
    list of sids.  Returns [] for a None/sentinel start.

    Uses Floyd's tortoise-and-hare algorithm to detect cycles and raises
    CompoundFileBinaryError on a cyclic chain.
    """
    fat = self.fat
    fat_name = "FAT"
    if minifat:
        fat = self.minifat
        fat_name = "MINIFAT"
    # Floyd's Tortoise and Hare cycle-finding algorithm
    a = start_sid   # hare (advances two steps per loop)
    b = start_sid   # tortoise (advances one step per loop)
    sectors = []
    if start_sid in (None, ENDOFCHAIN, FREESECT, DIFSECT, FATSECT):
        return []
    while b != ENDOFCHAIN:
        sectors.append(b)
        b = fat[b]
        if a != ENDOFCHAIN:
            a = fat[a]
        if a != ENDOFCHAIN:
            a = fat[a]
            # hare caught the tortoise: the chain loops
            if a == b:
                raise CompoundFileBinaryError('cyclic %s fat chain found starting at %d' % (fat_name, start_sid))
    return sectors
def mini_stream_grow(self):
    """Append one regular sector to the mini stream's FAT chain,
    starting the chain (and pointing the root entry at it) if empty."""
    sid = self.next_free_sect()
    if not self.mini_stream_chain:
        self.mini_stream_chain = [sid]
        # the root storage entry holds the mini stream's start sector
        self.root.sector_id = sid
    else:
        self.fat[self.mini_stream_chain[-1]] = sid
        self.mini_stream_chain.append(sid)
    self.fat[sid] = ENDOFCHAIN
def fat_chain_append(self, start_sid, minifat=False):
    """Allocate a new sector, link it onto the chain starting at
    *start_sid* (or start a new chain when *start_sid* is None), and
    return its sid."""
    if minifat:
        sect = self.next_free_minifat_sect()
        fat = self.minifat
    else:
        sect = self.next_free_sect()
        fat = self.fat
    if start_sid is None:
        fat[sect] = ENDOFCHAIN
    else:
        fat_chain = self.get_fat_chain(start_sid, minifat)
        assert fat_chain
        # hook the new sector onto the chain tail
        fat[fat_chain[-1]] = sect
        fat[sect] = ENDOFCHAIN
    return sect
def free_fat_chain(self, start_sid, minifat=False):
    """Mark every sector of a chain FREESECT and push the sids onto the
    front of the relevant free list (so they are reused first)."""
    fat = self.fat
    if minifat:
        fat = self.minifat
    for sid in self.get_fat_chain(start_sid, minifat):
        fat[sid] = FREESECT
        if minifat:
            self.minifat_freelist.insert(0, sid)
        else:
            self.fat_freelist.insert(0, sid)
def create_dir_entry(self, path, dir_type='storage', class_id=None):
    """Create and return a new DirEntry at *path* under its (existing)
    parent storage.  Raises ValueError if the path exists, the parent is
    missing, or the parent is not a storage type."""
    if self.exists(path):
        raise ValueError("%s already exists" % path)
    dirname = os.path.dirname(path)
    basename = os.path.basename(path)
    root = self.find(dirname)
    if root is None:
        raise ValueError("parent dirname does not exist: %s" % dirname)
    if not root.type in ('storage', 'root storage'):
        raise ValueError("can not add entry to non storage type")
    dir_id = self.next_free_dir_id()
    logging.debug("next dir id %d" % dir_id)
    entry = DirEntry(self, dir_id)
    entry.name = basename
    entry.type = dir_type
    entry.class_id = class_id
    # TODO: Implement a Red Black tree
    # all new DirEntries are black, so there is no tree balancing.
    # AAF Low-Level Container Specification says its alright to do this.
    entry.color = 'black'
    root.add_child(entry)
    self.dir_cache[dir_id] = entry
    return entry
def free_dir_entry(self, entry):
    """Return *entry*'s id to the directory free list, drop every cached
    reference to it, and invalidate the entry's id."""
    self.dir_freelist.append(entry.dir_id)
    # purge the id from all per-entry caches
    for cache in (self.dir_cache, self.children_cache, self.modified):
        cache.pop(entry.dir_id, None)
    entry.dir_id = None
def remove(self, path):
    """
    Removes both streams and storage DirEntry types from file.
    storage type entries need to be empty dirs.

    Raises ValueError for a missing path, the root entry, or a
    non-empty storage.
    """
    entry = self.find(path)
    if not entry:
        # BUGFIX: message grammar ("does not exists" -> "does not exist")
        raise ValueError("%s does not exist" % path)
    if entry.type == 'root storage':
        # BUGFIX: message typo ("can no remove" -> "cannot remove")
        raise ValueError("cannot remove root entry")
    if entry.type == "storage" and not entry.child_id is None:
        raise ValueError("storage contains children")
    entry.pop()
    # remove stream data; streams smaller than min_stream_max_size
    # live in the mini stream, hence the minifat flag
    if entry.type == "stream":
        self.free_fat_chain(entry.sector_id, entry.byte_size < self.min_stream_max_size)
    self.free_dir_entry(entry)
def rmtree(self, path):
    """
    Removes directory structure, similar to shutil.rmtree.
    """
    # bottom-up walk so children are freed before their parents
    for root, storage, streams in self.walk(path, topdown=False):
        for item in streams:
            # small streams live in the mini stream, hence the minifat flag
            self.free_fat_chain(item.sector_id, item.byte_size < self.min_stream_max_size)
            self.free_dir_entry(item)
        for item in storage:
            self.free_dir_entry(item)
        root.child_id = None
    # remove root item
    self.remove(path)
def listdir(self, path = None):
    """
    Return a list containing the ``DirEntry`` objects in the directory
    given by path.
    """
    return self.listdir_dict(path).values()
def listdir_dict(self, path = None):
    """
    Return a dict containing the ``DirEntry`` objects in the directory
    given by path with name of the dir as key.

    Results are memoized per directory in ``self.children_cache``.
    Raises CompoundFileBinaryError if the sibling tree yields more
    entries than the directory stream can physically hold (corruption).
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if root is None:
        raise ValueError("unable to find dir: %s" % str(path))
    if not root.isdir():
        raise ValueError("can only list storage types")
    children = self.children_cache.get(root.dir_id, None)
    if children is not None:
        return children
    child = root.child()
    result = {}
    if not child:
        self.children_cache[root.dir_id] = result
        return result
    # upper bound on valid entries, used to detect cyclic sibling trees
    dir_per_sector = self.sector_size // 128
    max_dirs_entries = self.dir_sector_count * dir_per_sector
    # iterative traversal of the left/right sibling tree
    stack = deque([child])
    count = 0
    while stack:
        current = stack.pop()
        result[current.name] = current
        count += 1
        if count > max_dirs_entries:
            raise CompoundFileBinaryError("corrupt folder structure")
        left = current.left()
        if left:
            stack.append(left)
        right = current.right()
        if right:
            stack.append(right)
    self.children_cache[root.dir_id] = result
    return result
def find(self, path):
    """
    find a ``DirEntry`` located at *path*. Returns ``None`` if path
    does not exist.
    """
    # a DirEntry passed in is returned unchanged (identity lookup)
    if isinstance(path, DirEntry):
        return path
    if path == "/":
        return self.root
    split_path = path.lstrip('/').split("/")
    i = 0
    root = self.root
    # descend one path component at a time using the cached child maps
    while True:
        children = self.listdir_dict(root)
        match = children.get(split_path[i], None)
        if match:
            if i == len(split_path) - 1:
                return match
            root = match
            i += 1
        else:
            return None
def walk(self, path = None, topdown=True):
    """
    Similar to :func:`os.walk`, yields a 3-tuple ``(root, storage_items, stream_items)``.

    With ``topdown=False`` the tree is visited bottom-up (children
    before their parent).
    """
    if path is None:
        path = self.root
    root = self.find(path)
    if not root.isdir():
        raise ValueError("can only walk storage types")
    if not root.child_id:
        return
    if topdown:
        storage_items = []
        stream_items = []
        for item in self.listdir(root):
            if item.isdir():
                storage_items.append(item)
            else:
                stream_items.append(item)
        yield root, storage_items, stream_items
        for item in storage_items:
            for root, storage_items, stream_items in self.walk(item):
                yield root, storage_items, stream_items
    else:
        # despite the name, this inner generator performs the bottom-up
        # (post-order) visit: sub-trees are yielded before their root
        def topdown_visit_node(root):
            storage_items = []
            stream_items = []
            for item in self.listdir(root):
                if item.isdir():
                    for sub_root, sub_storage, sub_stream in topdown_visit_node(item):
                        yield sub_root, sub_storage, sub_stream
                    storage_items.append(item)
                else:
                    stream_items.append(item)
            yield root, storage_items, stream_items
        for root_item, storage, stream in topdown_visit_node(root):
            yield root_item, storage, stream
def exists(self, path):
    """
    Return ``True`` if path refers to a existing path.
    """
    return self.find(path) is not None
def makedir(self, path, class_id=None):
    """
    Create a storage DirEntry name path
    """
    entry = self.create_dir_entry(path, dir_type='storage', class_id=class_id)
    return entry
def makedirs(self, path):
    """
    Recursive storage DirEntry creation function.
    """
    assert path.startswith('/')
    partial = ""
    # create each missing component from the top down
    for name in path.strip('/').split('/'):
        partial += "/" + name
        if not self.exists(partial):
            self.makedir(partial)
    return self.find(path)
def move(self, src, dst):
    """
    Moves ``DirEntry`` from src to dst.

    A dst ending in '/' keeps the source's name.  Raises ValueError when
    src is missing, dst exists, either path is '/', the dst dirname is
    missing, or the dst dirname is a stream.
    """
    src_entry = self.find(src)
    if src_entry is None:
        raise ValueError("src path does not exist: %s" % src)
    if dst.endswith('/'):
        dst += src_entry.name
    if self.exists(dst):
        raise ValueError("dst path already exist: %s" % dst)
    if dst == '/' or src == '/':
        raise ValueError("cannot overwrite root dir")
    split_path = dst.strip('/').split('/')
    dst_basename = split_path[-1]
    dst_dirname = '/' + '/'.join(split_path[:-1])
    dst_entry = self.find(dst_dirname)
    if dst_entry is None:
        # BUGFIX: message previously said "src path does not exist"
        raise ValueError("dst dirname does not exist: %s" % dst_dirname)
    if not dst_entry.isdir():
        raise ValueError("dst dirname cannot be stream: %s" % dst_dirname)
    src_entry.pop()
    src_entry.parent = None
    src_entry.name = dst_basename
    dst_entry.add_child(src_entry)
    # BUGFIX: only touch the children cache when the dst dir has been
    # listed before; unconditional indexing raised KeyError otherwise
    if dst_entry.dir_id in self.children_cache:
        self.children_cache[dst_entry.dir_id][src_entry.name] = src_entry
    return src_entry
|
markreidvfx/pyaaf2
|
aaf2/properties.py
|
add2set
|
python
|
def add2set(self, pid, key, value):
    """Low-level add of *value* under *key* into the StrongRefSetProperty
    stored at *pid*, detaching any previous object held under the key and
    attaching the new one when the parent is on disk."""
    prop = self.property_entries[pid]
    current = prop.objects.get(key, None)
    current_local_key = prop.references.get(key, None)
    # replacing an existing member: detach the old object first
    if current and current is not value:
        current.detach()
    # first time this key is seen: assign it a fresh local key
    if current_local_key is None:
        prop.references[key] = prop.next_free_key
        prop.next_free_key += 1
    prop.objects[key] = value
    if prop.parent.dir:
        ref = prop.index_ref_name(key)
        dir_entry = prop.parent.dir.get(ref)
        if dir_entry is None:
            dir_entry = prop.parent.dir.makedir(ref)
        if value.dir != dir_entry:
            value.attach(dir_entry)
    prop.mark_modified()
|
low level add to StrongRefSetProperty
|
train
|
https://github.com/markreidvfx/pyaaf2/blob/37de8c10d3c3495cc00c705eb6c5048bc4a7e51f/aaf2/properties.py#L1313-L1336
| null |
from __future__ import (
    unicode_literals,
    absolute_import,
    print_function,
    division,
)

import functools
import struct
import weakref
from io import BytesIO

from .utils import (
    read_u8,
    read_u16le,
    read_u32le,
    write_u8,
    write_u16le,
    write_u32le,
    decode_utf16le,
    encode_utf16le,
    encode_utf16_array,
    encode_auid_array,
    encode_u16le,
    encode_u32le,
    encode_u8,
    encode_s64le,
    mangle_name,
)
from .auid import AUID
from .mobid import MobID
from .exceptions import AAFPropertyError, AAFAttachError
# Stored-form (SF_*) codes: each property's on-disk encoding is tagged
# with one of these values.
SF_DATA = 0x82
SF_DATA_STREAM = 0x42
SF_STRONG_OBJECT_REFERENCE = 0x22
SF_STRONG_OBJECT_REFERENCE_VECTOR = 0x32
SF_STRONG_OBJECT_REFERENCE_SET = 0x3A
SF_WEAK_OBJECT_REFERENCE = 0x02
SF_WEAK_OBJECT_REFERENCE_VECTOR = 0x12
SF_WEAK_OBJECT_REFERENCE_SET = 0x1A
SF_WEAK_OBJECT_REFERENCE_STORED_OBJECT_ID = 0x03
SF_UNIQUE_OBJECT_ID = 0x86
SF_OPAQUE_STREAM = 0x40

# not sure about these
SF_DATA_VECTOR = 0xD2
SF_DATA_SET = 0xDA

# version byte written into each serialized property entry
PROPERTY_VERSION=32
def writeonly(func):
    """Decorator for mutating property methods.

    Raises AAFPropertyError when the underlying file is read-only and
    marks the property modified after a successful call.
    """
    # BUGFIX: preserve the wrapped function's metadata (__name__/__doc__)
    @functools.wraps(func)
    def func_wrapper(self, *args, **kwargs):
        if not self.writeable:
            raise AAFPropertyError("file readonly")
        result = func(self, *args, **kwargs)
        self.mark_modified()
        return result
    return func_wrapper
# Well-known AUIDs used to special-case class/type definitions and the
# unique-key properties of Mobs and EssenceData.
CLASSDEF_AUID = AUID("0d010101-0101-0100-060e-2b3402060101")
TYPEDEF_AUID = AUID("0d010101-0203-0000-060e-2b3402060101")
MOB_MOBID_AUID = AUID("01011510-0000-0000-060e-2b3401010101")
ESSENCEDATA_MOBID_AUID = AUID("06010106-0100-0000-060e-2b3401010102")
class Property(object):
    """A single stored AAF property value: raw ``data`` bytes plus the
    metadata (pid, stored format, version) needed to decode it via the
    parent object's class/type definitions."""

    __slots__ = ('pid', 'format', 'version', 'data', 'parent', '_propertydef')

    def __init__(self, parent, pid, format, version=PROPERTY_VERSION):
        self.pid = pid
        self.format = format
        self.version = version
        self.data = None
        self._propertydef = None
        self.parent = parent

    def format_name(self):
        # human-readable class name of this property's stored format
        return str(property_formats[self.format].__name__)

    @property
    def attached(self):
        # a property is attached when its parent object lives in the file
        if self.parent.dir:
            return True
        return False

    @property
    def writeable(self):
        if self.parent.root.mode in ('rb', ):
            return False
        return True

    def decode(self):
        # subclasses parse self.data here; base properties keep raw bytes
        pass

    def mark_modified(self):
        if self.attached:
            self.parent.root.manager.add_modified(self.parent)

    @property
    def propertydef(self):
        """The PropertyDef matching this pid, cached after first lookup."""
        if self._propertydef:
            return self._propertydef
        classdef = self.parent.classdef
        if classdef is None:
            return
        p = classdef.get_propertydef_from_pid(self.pid)
        if p:
            self._propertydef = p
            return p
        # fall back to slow method if early in the bootstraping process
        # seems to be on ClassDefinitions
        for p in classdef.all_propertydefs():
            if p.pid == self.pid:
                self._propertydef = p
                return p

    @property
    def unique(self):
        return self.propertydef.unique

    @property
    def name(self):
        propertydef = self.propertydef
        if propertydef:
            return propertydef.property_name

    @property
    def typedef(self):
        propertydef = self.propertydef
        if propertydef:
            return propertydef.typedef

    def copy(self, parent):
        """Return a copy of this property bound to *parent*."""
        p = self.__class__(parent, self.pid, self.format, version=PROPERTY_VERSION)
        p.data = bytes(self.data)
        return p

    @property
    def value(self):
        d = self.data
        if d is None:
            return None
        return self.typedef.decode(d)

    @value.setter
    @writeonly
    def value(self, value):
        # changing a unique key on an attached object must re-key the
        # owning index (mobs/essencedata); anything else is an error
        if self.data is not None and self.parent.dir and self.unique:
            if self.propertydef.auid == MOB_MOBID_AUID:
                self.parent.root.content.mobs.swap_unique_key(self.value, value)
            elif self.propertydef.auid == ESSENCEDATA_MOBID_AUID:
                self.parent.root.content.essencedata.swap_unique_key(self.value, value)
            else:
                raise AAFPropertyError("cannot change unique property value of attached object")
        # assigning None removes the property entirely
        if value is None:
            self.remove_pid_entry()
            return
        self.data = self.typedef.encode(value)
        self.add_pid_entry()

    def add_pid_entry(self):
        # register this property on the parent (idempotent)
        if not self.pid in self.parent.property_entries:
            self.parent.property_entries[self.pid] = self
        return self

    def remove_pid_entry(self):
        if self.pid in self.parent.property_entries:
            del self.parent.property_entries[self.pid]

    def __repr__(self):
        name = self.name
        if name:
            return "<%s %s>" % (name, str(self.typedef))
        else:
            return "<%s %d bytes>" % (self.__class__.__name__, len(self.data))
class StreamProperty(Property):
    """Property whose payload lives in a separate CFB stream; ``data``
    only stores the stream's name (prefixed with a byte-order byte).

    While detached, the stream is parked in a temp dir referenced by
    ``self.dir`` and moved under the parent on attach.
    """

    __slots__ = ('stream_name', 'dir')

    def __init__(self, parent, pid, format, version=PROPERTY_VERSION):
        super(StreamProperty, self).__init__(parent, pid, format, version)
        self.stream_name = None
        self.dir = None

    def copy(self, parent):
        """Copy the property and its stream contents to *parent*."""
        p = super(StreamProperty, self).copy(parent)
        p.stream_name = self.stream_name
        a = self.open('r')
        b = p.open("w")
        byte_size = a.dir.byte_size
        read_size = self.parent.root.cfb.sector_size
        # copy stream data one sector at a time
        while byte_size > 0:
            d = a.read(read_size)
            b.write(d)
            byte_size -= read_size
        return p

    def decode(self):
        # first byte is endianess
        assert self.data[0:1] == b'\x55' # unspecified
        self.stream_name = decode_utf16le(self.data[1:])

    def encode(self, data):
        return b'\x55' + encode_utf16le(data)

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, str(self.stream_name))

    def setup_stream(self):
        """Lazily derive the stream name from the propertydef."""
        if self.stream_name:
            return
        self.stream_name = mangle_name(self.propertydef.property_name, self.pid, 32)
        self.data = self.encode(self.stream_name)
        self.add_pid_entry()

    def open(self, mode='r'):
        """Open the backing stream; write modes create it, using a temp
        dir when the property is not yet attached."""
        self.setup_stream()
        if mode == 'r':
            if self.attached:
                stream = self.parent.dir.get(self.stream_name)
            else:
                stream = self.dir
            if not stream:
                raise AAFPropertyError("cannot find stream: %s" % self.stream_name)
            return stream.open(mode)
        else:
            if not self.writeable:
                raise AAFPropertyError("file readonly")
            if self.attached:
                return self.parent.dir.touch(self.stream_name).open(mode)
            else:
                # detached: park the stream in a temp dir until attach()
                if self.dir is None:
                    tmp_dir = self.parent.root.manager.create_temp_dir()
                    stream = tmp_dir.touch(self.stream_name).open(mode)
                    self.dir = stream.dir
                    return stream
                return self.dir.open(mode)

    def detach(self):
        """Move the backing stream out of the parent into a temp dir."""
        if self.stream_name is None:
            raise AAFAttachError("stream has no name")
        stream = self.parent.dir.get(self.stream_name)
        if not stream:
            raise AAFAttachError("stream doesn't exists")
        stream_path = stream.path()
        tmp = self.parent.root.manager.create_temp_dir().path()
        self.dir = self.parent.root.cfb.move(stream_path, tmp + "/" + self.stream_name)

    def attach(self):
        """Move a parked stream back under the (attached) parent dir."""
        if self.dir is None:
            return
        if self.parent.dir is None:
            raise AAFAttachError("stream parent not attached")
        if self.stream_name is None:
            raise AAFAttachError("stream has no name")
        stream = self.parent.dir.get(self.stream_name)
        if stream:
            raise AAFAttachError("dest stream already exists")
        stream_path = self.parent.dir.path() + "/" + self.stream_name
        self.parent.root.cfb.move(self.dir.path(), stream_path)
        self.dir = None

    @property
    def value(self):
        return self.parent.dir.get(self.stream_name)
class StrongRefProperty(Property):
    """Single strong (owning) reference to a child object stored in a
    sub-directory named by ``ref``.

    While attached the child is held via weakref (the manager owns it);
    while detached a normal reference keeps the child alive.
    """

    __slots__ = ('ref', 'objectref')

    def __init__(self, parent, pid, format, version=PROPERTY_VERSION):
        super(StrongRefProperty, self).__init__(parent, pid, format, version)
        self.ref = None
        self.objectref = None

    @property
    def object(self):
        # objectref may be a weakref (attached) or a plain ref (detached)
        if self.objectref is None:
            return None
        elif isinstance(self.objectref, weakref.ref):
            return self.objectref()
        else:
            return self.objectref

    @object.setter
    def object(self, value):
        if value is None:
            self.objectref = None
        elif self.attached:
            self.objectref = weakref.ref(value)
        else:
            self.objectref = value

    def copy(self, parent):
        """Deep-copy the property and its referenced object to *parent*."""
        p = super(StrongRefProperty, self).copy(parent)
        p.ref = self.ref
        dir_entry = parent.dir.get(p.ref)
        if dir_entry is None:
            dir_entry = parent.dir.makedir(p.ref)
        p.object = self.value.copy(dir_entry)
        return p

    def decode(self):
        self.ref = decode_utf16le(self.data)

    def encode(self, data):
        return encode_utf16le(data)

    def __repr__(self):
        return "<%s %s to %s>" % (self.name, self.__class__.__name__, str(self.ref))

    @property
    def value(self):
        if self.object:
            return self.object
        # lazily load the referenced object from the file
        dir_entry = self.parent.dir.get(self.ref)
        obj = None
        if dir_entry:
            obj = self.parent.root.manager.read_object(dir_entry)
        self.object = obj
        return obj

    @value.setter
    @writeonly
    def value(self, value):
        if value is None:
            self.remove_pid_entry()
            return
        typedef = self.typedef
        ref_classdef = typedef.ref_classdef
        if not ref_classdef.isinstance(value.classdef):
            raise TypeError("must be instance of: %s" % ref_classdef.class_name)
        if self.ref is None:
            propdef = self.propertydef
            self.ref = mangle_name(propdef.property_name, self.pid, 32)
            self.data = self.encode(self.ref)
        # before asigning new object detach old
        if self.object:
            self.object.detach()
            self.object = None
        self.object = value
        if not self.pid in self.parent.property_entries:
            self.parent.property_entries[self.pid] = self
        self.attach()

    def detach(self):
        # convert to regular ref
        self.objectref = self.value

    def attach(self):
        """Attach the referenced object under the parent's dir and hold
        it by weakref from then on."""
        obj = self.value
        if not self.object:
            return
        if not self.parent.dir:
            return
        dir_entry = self.parent.dir.get(self.ref)
        if dir_entry is None:
            dir_entry = self.parent.dir.makedir(self.ref)
        if self.object.dir != dir_entry:
            self.object.attach(dir_entry)
        # convert to weakref
        self.objectref = weakref.ref(obj)
class StrongRefVectorProperty(Property):
    """Ordered collection of strong references.

    ``references[i]`` holds the local key of element *i*; each element is
    stored in a sub-directory named ``"<index_name>{<key:x>}"``.  Loaded
    elements are cached in ``objects`` (weakly while attached).
    """

    __slots__ = ('references', 'next_free_key', 'last_free_key','objects', '_index_name')

    def __init__(self, parent, pid, format, version=PROPERTY_VERSION):
        super(StrongRefVectorProperty, self).__init__(parent, pid, format, version)
        self.references = []
        self._index_name = None
        self.next_free_key = 0
        self.last_free_key = 0xFFFFFFFF
        if self.attached:
            self.objects = weakref.WeakValueDictionary()
        else:
            self.objects = {}

    def copy(self, parent):
        """Deep-copy the vector and every referenced object to *parent*."""
        p = super(StrongRefVectorProperty, self).copy(parent)
        p.references = list(self.references)
        p.objects = {}
        p.index_name = self.index_name
        p.next_free_key = self.next_free_key
        p.last_free_key = self.last_free_key
        p.data = self.data
        for i, value in enumerate(self):
            # BUGFIX: was `self.index_ref_name(self.references[i])`, which
            # passed a local KEY where index_ref_name expects an INDEX
            # (it re-indexes self.references internally) — wrong whenever
            # keys and indices diverge, e.g. after pop().  attach()/get()
            # already pass the index.
            ref = self.index_ref_name(i)
            dir_entry = parent.dir.get(ref)
            if dir_entry is None:
                dir_entry = parent.dir.makedir(ref)
            c = value.copy(dir_entry)
            p.objects[i] = c
        return p

    @property
    def index_name(self):
        """Mangled base name for the index stream and element dirs;
        accessing it also initializes self.data."""
        if self._index_name:
            return self._index_name
        propdef = self.propertydef
        name = mangle_name(propdef.property_name, self.pid, 32-10)
        self.data = self.encode(name)
        self._index_name = name
        return name

    @index_name.setter
    def index_name(self, value):
        self._index_name = value

    def encode(self, data):
        return encode_utf16le(data)

    def decode(self):
        self.index_name = decode_utf16le(self.data)

    def read_index(self):
        """Read the "<name> index" stream: count, key counters, and the
        local key for every element."""
        index_name = self.index_name + " index"
        index_dir = self.parent.dir.get(index_name)
        if not index_dir:
            raise AAFPropertyError("cannot find index stream: %s" % index_name)
        s = index_dir.open('r')
        # read the whole index
        f = BytesIO(s.read())
        count = read_u32le(f)
        self.next_free_key = read_u32le(f)
        self.last_free_key = read_u32le(f)
        pack_fmt = str("<%dI" % count)
        self.references = list(struct.unpack(pack_fmt, f.read(4 * count)))

    @writeonly
    def write_index(self):
        """Write the index stream back out and truncate any leftovers."""
        s = self.parent.dir.touch(self.index_name + " index").open(mode='rw')
        f = BytesIO()
        count = len(self.references)
        write_u32le(f, count)
        write_u32le(f, self.next_free_key)
        write_u32le(f, self.last_free_key)
        for local_key in self.references:
            write_u32le(f, local_key)
        s.write(f.getvalue())
        s.truncate()

    @property
    def ref_classdef(self):
        return self.typedef.element_typedef.ref_classdef

    def index_ref_name(self, index):
        # element dir name: base name + local key in hex braces
        return "%s{%x}" % (self.index_name, self.references[index])

    def get(self, index, default=None):
        """Return element *index* (negative indices clamp to 0), loading
        and caching it from the file on first access."""
        if index >= len(self.references):
            return default
        if index < 0:
            index = max(0, len(self.references) + index)
        item = self.objects.get(index, None)
        if item:
            return item
        ref = self.index_ref_name(index)
        dir_entry = self.parent.dir.get(ref)
        item = self.parent.root.manager.read_object(dir_entry)
        self.objects[index] = item
        return item

    def __iter__(self):
        for i in range(len(self.references)):
            yield self.get(i)

    def __len__(self):
        return len(self.references)

    def __getitem__(self, index):
        item = self.get(index, None)
        if item is None:
            raise IndexError(index)
        return item

    @writeonly
    def __setitem__(self, index, value):
        if index < 0:
            index = max(0, len(self) + index)
        if index >= len(self):
            raise IndexError("StrongRefVectorProperty assignment index out of range")
        if value.dir:
            raise AAFAttachError("object already attached")
        obj = self.get(index, None)
        if obj:
            obj.detach()
        self.objects[index] = value
        self.attach()

    @writeonly
    def clear(self):
        for obj in self:
            obj.detach()
        self.next_free_key = 0
        self.references = []
        if self.attached:
            self.objects = weakref.WeakValueDictionary()
        else:
            self.objects = {}

    @writeonly
    def pop(self, index):
        """Remove and return element *index*, shifting later cached
        entries down by one and detaching the removed object."""
        obj = self.get(index, None)
        if obj is None:
            raise IndexError(index)
        if index < 0:
            index = max(0, len(self) + index)
        self.references.pop(index)
        # re-key all cached objects above the removed index (the get()
        # above guarantees `index` itself is in the cache)
        objects = {}
        for key, cached in self.objects.items():
            if key == index:
                item = cached
            elif key > index:
                objects[key-1] = cached
            else:
                objects[key] = cached
        if self.attached:
            self.objects = weakref.WeakValueDictionary(objects)
        else:
            self.objects = objects
        assert obj is item
        obj.detach()
        return obj

    @writeonly
    def insert(self, index, value):
        assert self.ref_classdef.isinstance(value.classdef)
        self.references.insert(index, self.next_free_key)
        objects = {}
        objects[index] = value
        # shift cached entries at or above the insertion point up by one
        # (BUGFIX-cleanup: loop variable no longer shadows `value`)
        for key, cached in self.objects.items():
            if key >= index:
                objects[key+1] = cached
            else:
                objects[key] = cached
        self.next_free_key += 1
        if self.attached:
            self.objects = weakref.WeakValueDictionary(objects)
        else:
            self.objects = objects
        self.attach()

    @writeonly
    def extend(self, value):
        index_name = self.index_name # sets self.data
        ref_classdef = self.ref_classdef
        # validate everything before mutating any state
        for obj in value:
            assert ref_classdef.isinstance(obj.classdef)
            if obj.dir:
                raise AAFAttachError("object already attached")
        for obj in value:
            i = len(self.references)
            self.references.append(self.next_free_key)
            self.objects[i] = obj
            self.next_free_key += 1
        self.add_pid_entry()
        self.attach()

    def append(self, value):
        self.extend([value])

    @property
    def value(self):
        return [item for item in self]

    @value.setter
    @writeonly
    def value(self, value):
        if value is None:
            self.remove_pid_entry()
            return
        self.clear()
        self.extend(value)

    def detach(self):
        # swap the weak cache for strong refs so elements stay alive
        objects = {}
        for i, obj in enumerate(self):
            objects[i] = obj
        self.objects = objects

    def attach(self):
        """Attach every element under its own sub-directory of the
        parent dir (no-op while the parent is detached)."""
        if not self.parent.dir:
            return
        for i, obj in enumerate(self):
            ref = self.index_ref_name(i)
            dir_entry = self.parent.dir.get(ref)
            if dir_entry is None:
                dir_entry = self.parent.dir.makedir(ref)
            if obj.dir != dir_entry:
                obj.attach(dir_entry)

    def __repr__(self):
        return "<%s %s to %s %d items>" % (self.name, self.__class__.__name__, str(self.index_name), len(self.references))
class StrongRefSetProperty(Property):
__slots__ = ('references', 'index_name', 'next_free_key', 'last_free_key', 'key_pid', 'key_size', 'objects')
def __init__(self, parent, pid, format, version=PROPERTY_VERSION):
super(StrongRefSetProperty, self).__init__(parent, pid, format, version)
self.references = {}
self.index_name = None
self.next_free_key = 0
self.last_free_key = 0xFFFFFFFF
# Pid of the referenced objects unique_key
self.key_pid = None
self.key_size = None
if self.attached:
self.objects = weakref.WeakValueDictionary()
else:
self.objects = {}
def copy(self, parent):
p = super(StrongRefSetProperty, self).copy(parent)
p.references = dict(self.references)
p.next_free_key = self.next_free_key
p.last_free_key = self.last_free_key
p.key_size = self.key_size
p.index_name = self.index_name
p.key_pid = self.key_pid
for key, value in self.items():
ref = self.index_ref_name(key)
dir_entry = parent.dir.get(ref)
if dir_entry is None:
dir_entry = parent.dir.makedir(ref)
p.objects[key] = value.copy(dir_entry)
return p
def encode(self, data):
return encode_utf16le(data)
def decode(self):
self.index_name = decode_utf16le(self.data)
def read_index(self):
index_name = self.index_name + " index"
index_dir = self.parent.dir.get(index_name)
if not index_dir:
raise AAFPropertyError("cannot find index stream: %s" % index_name)
s = index_dir.open('r')
# read the whole of the index
f = BytesIO(s.read())
count = read_u32le(f)
self.next_free_key = read_u32le(f)
self.last_free_key = read_u32le(f)
self.key_pid = read_u16le(f)
self.key_size = read_u8(f)
assert self.key_size in (16, 32)
fmt = ''.join((
'I', # local_key
'I', # ref_count
'%ds' % self.key_size))
index_fmt = struct.Struct(str('<' + fmt * count))
index_data = index_fmt.unpack(f.read())
for i in range(count):
index = i * 3
local_key = index_data[index + 0]
ref_count = index_data[index + 1]
key = index_data[index + 2]
# not sure if ref count is actually used
# doesn't apear to be
assert ref_count == 1
if self.key_size == 16:
key = AUID(bytes_le=key)
else:
key = MobID(bytes_le=key)
self.references[key] = local_key
@writeonly
def write_index(self):
s = self.parent.dir.touch(self.index_name + " index").open(mode='rw')
f = BytesIO()
count = len(self.references)
write_u32le(f, count)
write_u32le(f, self.next_free_key)
write_u32le(f, self.last_free_key)
write_u16le(f, self.key_pid)
write_u8(f, self.key_size)
for key, local_key in self.references.items():
write_u32le(f, local_key)
write_u32le(f, 1)
f.write(key.bytes_le)
s.write(f.getvalue())
s.truncate()
def index_ref_name(self, key):
return "%s{%x}" % (self.index_name, self.references[key])
def read_object(self, key):
obj = self.objects.get(key, None)
if obj:
return obj
ref = self.index_ref_name(key)
dir_entry = self.parent.dir.get(ref)
obj = self.parent.root.manager.read_object(dir_entry)
self.objects[key] = obj
return obj
def __contains__(self, key):
return key in self.references
def items(self):
for key in self.references:
obj = self.read_object(key)
yield (key, obj)
def values(self):
for key, obj in self.items():
yield obj
def __iter__(self):
return self.values()
def __len__(self):
return len(self.references)
def get_object(self, key):
for obj in self.value:
if obj.name == key:
return obj
def get(self, key, default=None):
if key not in self:
return default or self.get_object(key)
return self.read_object(key)
def __getitem__(self, key):
result = self.get(key, default=None)
if result is None:
raise KeyError(key)
return result
@writeonly
def swap_unique_key(self, old_key, new_key):
obj = self.get(old_key)
if obj is None:
raise ValueError("invalid key: %s" % str(old_key))
# remove reference
self.objects.pop(old_key)
local_key = self.references.pop(old_key)
self.references[new_key] = local_key
self.objects[new_key] = obj
obj.unique_property.data = new_key.bytes_le
@writeonly
def extend(self, values):
typedef = self.typedef
classdef = typedef.ref_classdef
# check values are the correct type
for item in values:
if not classdef.isinstance(item.classdef):
raise TypeError("Invalid Value")
if item.dir:
raise AAFAttachError("object already attached")
if self.key_pid is None:
self.key_pid = classdef.unique_key_pid
if self.key_size is None:
self.key_size = classdef.unique_key_size
if self.index_name is None:
propdef = self.propertydef
self.index_name = mangle_name(propdef.property_name, self.pid, 32-10)
self.data = self.encode(self.index_name)
for item in values:
key = item.unique_key
assert key is not None
current = self.objects.get(key, None)
current_local_key = self.references.get(key, None)
if current and current is not item:
current.detach()
if current_local_key is None:
self.references[key] = self.next_free_key
self.next_free_key += 1
self.objects[key] = item
if self.parent.dir:
ref = self.index_ref_name(key)
dir_entry = self.parent.dir.get(ref)
if dir_entry is None:
dir_entry = self.parent.dir.makedir(ref)
if item.dir != dir_entry:
item.attach(dir_entry)
self.add_pid_entry()
def append(self, value):
self.extend([value])
@writeonly
def clear(self):
for item in self.values():
item.detach()
self.references = {}
if self.attached:
self.objects = weakref.WeakValueDictionary()
else:
self.objects = {}
self.next_free_key = 0
@writeonly
def pop(self, key):
obj = self.get(key)
if obj is None:
raise KeyError(key)
self.references.pop(key)
self.objects.pop(key)
obj.detach()
return obj
@property
def value(self):
return [item for item in self]
@value.setter
@writeonly
def value(self, value):
if value is None:
self.remove_pid_entry()
return
self.clear()
if isinstance(value, dict):
value = value.values()
self.extend(value)
return
def detach(self):
objects = {}
for key, value in self.items():
objects[key] = value
self.objects = objects
def attach(self):
if not self.parent.dir:
return
for key in self.references:
obj = self.objects.get(key, None)
if not obj:
continue
ref = self.index_ref_name(key)
dir_entry = self.parent.dir.get(ref)
if dir_entry is None:
dir_entry = self.parent.dir.makedir(ref)
if obj.dir != dir_entry:
obj.attach(dir_entry)
def __repr__(self):
return "<%s to %s %d items>" % (self.__class__.__name__, str(self.index_name), len(self.references))
def resolve_weakref(p, ref):
ref_class_id = p.ref_classdef.auid
if ref_class_id == CLASSDEF_AUID:
return p.parent.root.metadict.lookup_classdef(ref)
elif ref_class_id == TYPEDEF_AUID:
return p.parent.root.metadict.lookup_typedef(ref)
else:
return p.parent.root.resovle_weakref(p.weakref_index, p.key_pid, ref)
class WeakRefProperty(Property):
__slots__ = ('weakref_index', 'key_pid', 'key_size', 'ref')
def __init__(self, parent, pid, format, version=PROPERTY_VERSION):
super(WeakRefProperty, self).__init__(parent, pid, format, version)
self.weakref_index = None
self.key_pid = None
self.key_size = None
self.ref = None
def copy(self, parent):
p = super(WeakRefProperty, self).copy(parent)
p.weakref_index = self.weakref_index
p.key_pid = self.pid
p.key_size = self.key_size
p.ref = self.ref
return p
def decode(self):
f = BytesIO(self.data)
self.weakref_index = read_u16le(f)
self.key_pid = read_u16le(f)
self.key_size = read_u8(f)
assert self.key_size in (16, 32)
if self.key_size == 16:
self.ref = AUID(bytes_le=f.read(self.key_size))
else:
self.ref = key = MobID(bytes_le=f.read(self.key_size))
def encode(self):
f = BytesIO()
ref = self.ref.bytes_le
key_size = len(ref)
assert key_size in (16, 32)
write_u16le(f, self.weakref_index)
write_u16le(f, self.key_pid)
write_u8(f, key_size)
f.write(ref)
return f.getvalue()
def __repr__(self):
return "<%s %s index %s %s>" % (self.name, self.__class__.__name__, self.weakref_index, self.ref)
@property
def ref_classdef(self):
return self.typedef.ref_classdef
@property
def value(self):
return resolve_weakref(self, self.ref)
@property
def pid_path(self):
return self.typedef.pid_path
@value.setter
@writeonly
def value(self, value):
if value is None:
self.remove_pid_entry()
return
ref_classdef = self.ref_classdef
assert ref_classdef.isinstance(value.classdef)
if self.key_pid is None:
self.key_pid = ref_classdef.unique_key_pid
if self.key_size is None:
self.key_size = ref_classdef.unique_key_size
if self.weakref_index is None:
self.weakref_index = self.parent.root.weakref_index(self.pid_path)
self.ref = value.unique_key
self.data = self.encode()
self.add_pid_entry()
class WeakRefArrayProperty(Property):
__slots__ = ('references', 'index_name', 'weakref_index', 'key_pid', 'key_size')
def __init__(self, parent, pid, format, version=PROPERTY_VERSION):
super(WeakRefArrayProperty, self).__init__(parent, pid, format, version)
self.references = []
self.index_name = None
self.weakref_index = None
self.key_pid = None
self.key_size = None
def copy(self, parent):
p = super(WeakRefArrayProperty, self).copy(parent)
p.references = list(self.references)
p.index_name = self.index_name
p.weakref_index = self.weakref_index
p.key_pid = self.key_pid
p.key_size = self.key_size
return p
def encode(self, data):
return encode_utf16le(data)
def decode(self):
self.index_name = decode_utf16le(self.data)
def read_index(self):
index_name = self.index_name + " index"
index_dir = self.parent.dir.get(index_name)
if not index_dir:
raise AAFPropertyError("cannot find index stream: %s" % index_name)
s = index_dir.open('r')
# read the whole index
f = BytesIO(s.read())
count = read_u32le(f)
self.weakref_index = read_u16le(f)
self.key_pid = read_u16le(f)
self.key_size = read_u8(f)
assert self.key_size in (16, 32)
for i in range(count):
if self.key_size == 16:
key = AUID(bytes_le=f.read(self.key_size))
else:
key = key = MobID(bytes_le=f.read(self.key_size))
self.references.append(key)
@writeonly
def write_index(self):
s = self.parent.dir.touch(self.index_name + " index").open(mode='rw')
f = BytesIO()
count = len(self.references)
write_u32le(f, count)
write_u16le(f, self.weakref_index)
write_u16le(f, self.key_pid)
write_u8(f, self.key_size)
for item in self.references:
f.write(item.bytes_le)
s.write(f.getvalue())
s.truncate()
def __repr__(self):
return "<%s %s to %d items>" % (self.name, self.__class__.__name__, len(self.references) )
@property
def ref_classdef(self):
return self.typedef.element_typedef.ref_classdef
@property
def pid_path(self):
return self.typedef.element_typedef.pid_path
def __len__(self):
return len(self.references)
def __iter__(self):
for ref in self.references:
r = resolve_weakref(self, ref)
yield r
@writeonly
def extend(self, values):
ref_classdef = self.ref_classdef
# check values are the correct type
for item in values:
if not ref_classdef.isinstance(item.classdef):
raise TypeError("Invalid Value")
if self.index_name is None:
propdef = self.propertydef
self.index_name = mangle_name(propdef.property_name, self.pid, 32-10)
self.data = self.encode(self.index_name)
if self.weakref_index is None:
self.weakref_index = self.parent.root.weakref_index(self.pid_path)
if self.key_pid is None:
self.key_pid = ref_classdef.unique_key_pid
if self.key_size is None:
self.key_size = ref_classdef.unique_key_size
for item in values:
self.references.append(item.unique_key)
self.add_pid_entry()
def append(self, value):
self.extend([value])
@writeonly
def clear(self):
self.references = []
@property
def value(self):
return [item for item in self]
@value.setter
@writeonly
def value(self, value):
if value is None:
self.remove_pid_entry()
return
self.clear()
self.extend(value)
class WeakRefVectorProperty(WeakRefArrayProperty):
pass
class WeakRefSetProperty(WeakRefArrayProperty):
pass
# haven't see aaf files that contain these yet
class WeakRefPropertyId(WeakRefProperty):
pass
class UniqueIdProperty(Property):
pass
class OpaqueStreamProperty(Property):
pass
property_formats = {
SF_DATA : Property,
SF_DATA_STREAM : StreamProperty,
SF_STRONG_OBJECT_REFERENCE : StrongRefProperty,
SF_STRONG_OBJECT_REFERENCE_VECTOR : StrongRefVectorProperty,
SF_STRONG_OBJECT_REFERENCE_SET : StrongRefSetProperty,
SF_WEAK_OBJECT_REFERENCE : WeakRefProperty,
SF_WEAK_OBJECT_REFERENCE_VECTOR : WeakRefVectorProperty,
SF_WEAK_OBJECT_REFERENCE_SET : WeakRefSetProperty,
SF_WEAK_OBJECT_REFERENCE_STORED_OBJECT_ID : WeakRefPropertyId,
SF_UNIQUE_OBJECT_ID : UniqueIdProperty,
SF_OPAQUE_STREAM : OpaqueStreamProperty
}
def add_string_property(parent, pid, value):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
if value:
p.data = encode_utf16le(value)
parent.property_entries[pid] = p
return p
def add_bool_property(parent, pid, value):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
if value:
p.data = b"\x01"
else:
p.data = b"\x00"
parent.property_entries[pid] = p
return p
def add_u32le_property(parent, pid, value):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
if value is not None:
p.data = encode_u32le(value)
parent.property_entries[pid] = p
return p
def add_u16le_property(parent, pid, value):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
if value is not None:
p.data = encode_u16le(value)
parent.property_entries[pid] = p
return p
def add_u8_property(parent, pid, value):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
if value is not None:
p.data = encode_u8(value)
parent.property_entries[pid] = p
return p
def add_auid_property(parent, pid, value):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
if value is None:
value = AUID(int=0)
elif not isinstance(value, AUID):
value = AUID(value)
p.data = value.bytes_le
parent.property_entries[pid] = p
return p
def add_auid_array_propertry(parent, pid, values):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
p.data = encode_auid_array(values)
parent.property_entries[pid] = p
return p
def add_utf16_array_property(parent, pid, values):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
p.data = encode_utf16_array(values)
parent.property_entries[pid] = p
return p
def add_s64le_array_property(parent, pid, values):
p = Property(parent, pid, SF_DATA, PROPERTY_VERSION)
p.data = b''
for i in values:
p.data += encode_s64le(i)
parent.property_entries[pid] = p
return p
def add_weakref_property(parent, pid, pid_path, key_pid, value):
p = WeakRefProperty(parent, pid, SF_WEAK_OBJECT_REFERENCE, PROPERTY_VERSION)
p.weakref_index = parent.root.weakref_index(pid_path)
p.key_pid = key_pid
p.key_size = 16
if not isinstance(value, AUID):
value = AUID(value)
p.ref = value
p.data = p.encode()
parent.property_entries[pid] = p
return p
def add_classdef_weakref_property(parent, pid, value):
pid_path = [0x0001, 0x0003]
return add_weakref_property(parent, pid , pid_path, 0x0005, value)
def add_typedef_weakref_property(parent, pid, value):
pid_path = [0x0001, 0x0004]
return add_weakref_property(parent, pid , pid_path, 0x0005, value)
def add_strongref_set_property(parent, pid, property_name, unique_pid, key_size=16):
p = StrongRefSetProperty(parent, pid, SF_STRONG_OBJECT_REFERENCE_SET, PROPERTY_VERSION)
name = mangle_name(property_name, pid, 32-10)
p.index_name = name
p.data = p.encode(name)
p.key_pid = unique_pid
p.key_size = key_size
parent.property_entries[pid] = p
return p
def add_typedef_weakref_vector_property(parent, pid, property_name, values):
# kAAFTypeID_TypeDefinitionWeakReferenceVector
pid_path = [0x0001, 0x0004]
key_pid = 0x0005
p = WeakRefVectorProperty(parent, pid, SF_WEAK_OBJECT_REFERENCE_VECTOR, PROPERTY_VERSION)
p.weakref_index = parent.root.weakref_index(pid_path)
p.key_pid = key_pid
p.key_size = 16
p.index_name = mangle_name(property_name, pid, 32)
p.data = p.encode(p.index_name)
p.references = [AUID(v) for v in values]
parent.property_entries[pid] = p
return p
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/MatchApiV4.py
|
MatchApiV4.matchlist_by_account
|
python
|
def matchlist_by_account(
self,
region,
encrypted_account_id,
queue=None,
begin_time=None,
end_time=None,
begin_index=None,
end_index=None,
season=None,
champion=None,
):
url, query = MatchApiV4Urls.matchlist_by_account(
region=region,
encrypted_account_id=encrypted_account_id,
queue=queue,
beginTime=begin_time,
endTime=end_time,
beginIndex=begin_index,
endIndex=end_index,
season=season,
champion=champion,
)
return self._raw_request(self.matchlist_by_account.__name__, region, url, query)
|
Get matchlist for ranked games played on given account ID and platform ID
and filtered using given filter parameters, if any
A number of optional parameters are provided for filtering. It is up to the caller to
ensure that the combination of filter parameters provided is valid for the requested
account, otherwise, no matches may be returned.
Note that if either beginIndex or endIndex are specified, then both must be specified and
endIndex must be greater than beginIndex.
If endTime is specified, but not beginTime, then beginTime is effectively the start of the
account's match history.
If beginTime is specified, but not endTime, then endTime is effectively the current time.
Note that endTime should be greater than beginTime if both are specified, although there is
no maximum limit on their range.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:param Set[int] queue: Set of queue IDs for which to filtering matchlist.
:param long begin_time: The begin time to use for filtering matchlist specified as
epoch milliseconds.
:param long end_time: The end time to use for filtering matchlist specified as epoch
milliseconds.
:param int begin_index: The begin index to use for filtering matchlist.
:param int end_index: The end index to use for filtering matchlist.
:param Set[int] season: Set of season IDs for which to filtering matchlist.
:param Set[int] champion: Set of champion IDs for which to filtering matchlist.
:returns: MatchlistDto
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/MatchApiV4.py#L32-L88
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class MatchApiV4(NamedEndpoint):
"""
This class wraps the Match-v4 endpoint calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#match-v4 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new MatchApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(MatchApiV4, self).__init__(base_api, self.__class__.__name__)
def by_id(self, region, match_id):
"""
Get match by match ID
:param string region: The region to execute this request on
:param long match_id: The match ID.
:returns: MatchDto
"""
url, query = MatchApiV4Urls.by_id(region=region, match_id=match_id)
return self._raw_request(self.by_id.__name__, region, url, query)
def timeline_by_match(self, region, match_id):
"""
Get match timeline by match ID.
Not all matches have timeline data.
:param string region: The region to execute this request on
:param long match_id: The match ID.
:returns: MatchTimelineDto
"""
url, query = MatchApiV4Urls.timeline_by_match(region=region, match_id=match_id)
return self._raw_request(self.timeline_by_match.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/MatchApiV4.py
|
MatchApiV4.timeline_by_match
|
python
|
def timeline_by_match(self, region, match_id):
url, query = MatchApiV4Urls.timeline_by_match(region=region, match_id=match_id)
return self._raw_request(self.timeline_by_match.__name__, region, url, query)
|
Get match timeline by match ID.
Not all matches have timeline data.
:param string region: The region to execute this request on
:param long match_id: The match ID.
:returns: MatchTimelineDto
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/MatchApiV4.py#L90-L102
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class MatchApiV4(NamedEndpoint):
"""
This class wraps the Match-v4 endpoint calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#match-v4 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new MatchApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(MatchApiV4, self).__init__(base_api, self.__class__.__name__)
def by_id(self, region, match_id):
"""
Get match by match ID
:param string region: The region to execute this request on
:param long match_id: The match ID.
:returns: MatchDto
"""
url, query = MatchApiV4Urls.by_id(region=region, match_id=match_id)
return self._raw_request(self.by_id.__name__, region, url, query)
def matchlist_by_account(
self,
region,
encrypted_account_id,
queue=None,
begin_time=None,
end_time=None,
begin_index=None,
end_index=None,
season=None,
champion=None,
):
"""
Get matchlist for ranked games played on given account ID and platform ID
and filtered using given filter parameters, if any
A number of optional parameters are provided for filtering. It is up to the caller to
ensure that the combination of filter parameters provided is valid for the requested
account, otherwise, no matches may be returned.
Note that if either beginIndex or endIndex are specified, then both must be specified and
endIndex must be greater than beginIndex.
If endTime is specified, but not beginTime, then beginTime is effectively the start of the
account's match history.
If beginTime is specified, but not endTime, then endTime is effectively the current time.
Note that endTime should be greater than beginTime if both are specified, although there is
no maximum limit on their range.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:param Set[int] queue: Set of queue IDs for which to filtering matchlist.
:param long begin_time: The begin time to use for filtering matchlist specified as
epoch milliseconds.
:param long end_time: The end time to use for filtering matchlist specified as epoch
milliseconds.
:param int begin_index: The begin index to use for filtering matchlist.
:param int end_index: The end index to use for filtering matchlist.
:param Set[int] season: Set of season IDs for which to filtering matchlist.
:param Set[int] champion: Set of champion IDs for which to filtering matchlist.
:returns: MatchlistDto
"""
url, query = MatchApiV4Urls.matchlist_by_account(
region=region,
encrypted_account_id=encrypted_account_id,
queue=queue,
beginTime=begin_time,
endTime=end_time,
beginIndex=begin_index,
endIndex=end_index,
season=season,
champion=champion,
)
return self._raw_request(self.matchlist_by_account.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/Handlers/RateLimit/RateLimitHandler.py
|
RateLimitHandler.preview_request
|
python
|
def preview_request(self, region, endpoint_name, method_name, url, query_params):
wait_until = max(
[
(
limiter.wait_until(region, endpoint_name, method_name),
limiter.friendly_name,
)
for limiter in self._limiters
],
key=lambda lim_pair: lim_pair[0]
if lim_pair[0]
else datetime.datetime(datetime.MINYEAR, 1, 1),
)
if wait_until[0] is not None and wait_until[0] > datetime.datetime.now():
to_wait = wait_until[0] - datetime.datetime.now()
logging.info(
"waiting for %s seconds due to %s limit...",
to_wait.total_seconds(),
wait_until[1],
)
time.sleep(to_wait.total_seconds())
|
called before a request is processed.
:param string region: the region of this request
:param string endpoint_name: the name of the endpoint being requested
:param string method_name: the name of the method being requested
:param url: the URL that is being requested.
:param query_params: dict: the parameters to the url that is being queried,
e.g. ?key1=val&key2=val2
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/Handlers/RateLimit/RateLimitHandler.py#L20-L52
| null |
class RateLimitHandler(RequestHandler):
def __init__(self):
super(RateLimitHandler, self).__init__()
self._limiters = (
ApplicationRateLimiter(),
MethodRateLimiter(),
OopsRateLimiter(),
)
def after_request(self, region, endpoint_name, method_name, url, response):
"""
Called after a response is received and before it is returned to the user.
:param string region: the region of this request
:param string endpoint_name: the name of the endpoint that was requested
:param string method_name: the name of the method that was requested
:param url: The url that was requested
:param response: the response received. This is a response from the Requests library
"""
for limiter in self._limiters:
limiter.update_limiter(region, endpoint_name, method_name, response)
return response
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/Handlers/RateLimit/RateLimitHandler.py
|
RateLimitHandler.after_request
|
python
|
def after_request(self, region, endpoint_name, method_name, url, response):
for limiter in self._limiters:
limiter.update_limiter(region, endpoint_name, method_name, response)
return response
|
Called after a response is received and before it is returned to the user.
:param string region: the region of this request
:param string endpoint_name: the name of the endpoint that was requested
:param string method_name: the name of the method that was requested
:param url: The url that was requested
:param response: the response received. This is a response from the Requests library
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/Handlers/RateLimit/RateLimitHandler.py#L54-L67
| null |
class RateLimitHandler(RequestHandler):
def __init__(self):
super(RateLimitHandler, self).__init__()
self._limiters = (
ApplicationRateLimiter(),
MethodRateLimiter(),
OopsRateLimiter(),
)
def preview_request(self, region, endpoint_name, method_name, url, query_params):
"""
called before a request is processed.
:param string region: the region of this request
:param string endpoint_name: the name of the endpoint being requested
:param string method_name: the name of the method being requested
:param url: the URL that is being requested.
:param query_params: dict: the parameters to the url that is being queried,
e.g. ?key1=val&key2=val2
"""
wait_until = max(
[
(
limiter.wait_until(region, endpoint_name, method_name),
limiter.friendly_name,
)
for limiter in self._limiters
],
key=lambda lim_pair: lim_pair[0]
if lim_pair[0]
else datetime.datetime(datetime.MINYEAR, 1, 1),
)
if wait_until[0] is not None and wait_until[0] > datetime.datetime.now():
to_wait = wait_until[0] - datetime.datetime.now()
logging.info(
"waiting for %s seconds due to %s limit...",
to_wait.total_seconds(),
wait_until[1],
)
time.sleep(to_wait.total_seconds())
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/LolStatusApiV3.py
|
LolStatusApiV3.shard_data
|
python
|
def shard_data(self, region):
url, query = LolStatusApiV3Urls.shard_data(region=region)
return self._raw_request(self.shard_data.__name__, region, url, query)
|
Get League of Legends status for the given shard.
Requests to this API are not counted against the application Rate Limits.
:param string region: the region to execute this request on
:returns: ShardStatus
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/LolStatusApiV3.py#L20-L31
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class LolStatusApiV3(NamedEndpoint):
"""
This class wraps the LoL-Status-v3 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#lol-status-v3 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new LolStatusApiV3 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(LolStatusApiV3, self).__init__(base_api, LolStatusApiV3.__name__)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/Handlers/TypeCorrectorHandler.py
|
TypeCorrectorHandler.preview_request
|
python
|
def preview_request(self, region, endpoint_name, method_name, url, query_params):
if query_params is not None:
for key, value in query_params.items():
if isinstance(value, bool):
query_params[key] = str(value).lower()
# check to see if we have a list/tuple, but not a string
if (
not hasattr(value, "strip")
and hasattr(value, "__getitem__")
or hasattr(value, "__iter__")
):
for idx, val in enumerate(value):
if isinstance(val, bool):
value[idx] = str(val).lower()
|
called before a request is processed.
:param string endpoint_name: the name of the endpoint being requested
:param string method_name: the name of the method being requested
:param url: the URL that is being requested.
:param query_params: dict: the parameters to the url that is being queried,
e.g. ?key1=val&key2=val2
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/Handlers/TypeCorrectorHandler.py#L13-L36
| null |
class TypeCorrectorHandler(RequestHandler):
"""
The TypeCorrector class is meant to correct any inconsistencies in the types
of objects provided as query parameters.
Currently this only involves changing boolean values into strings,
as the API only accepts lower case booleans for some reason.
"""
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/ChampionMasteryApiV4.py
|
ChampionMasteryApiV4.by_summoner_by_champion
|
python
|
def by_summoner_by_champion(self, region, encrypted_summoner_id, champion_id):
url, query = ChampionMasteryApiV4Urls.by_summoner_by_champion(
region=region,
encrypted_summoner_id=encrypted_summoner_id,
champion_id=champion_id,
)
return self._raw_request(
self.by_summoner_by_champion.__name__, region, url, query
)
|
Get a champion mastery by player ID and champion ID.
:param string region: the region to execute this request on
:param string encrypted_summoner_id: Summoner ID associated with the player
:param long champion_id: Champion ID to retrieve Champion Mastery for
:returns: ChampionMasteryDTO: This object contains single Champion Mastery information for
player and champion combination.
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/ChampionMasteryApiV4.py#L36-L54
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class ChampionMasteryApiV4(NamedEndpoint):
"""
This class wraps the Champion-Mastery-v4 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#champion-mastery-v4/ for more detailed
information
"""
def __init__(self, base_api):
"""
Initialize a new ChampionMasteryApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(ChampionMasteryApiV4, self).__init__(base_api, self.__class__.__name__)
def by_summoner(self, region, encrypted_summoner_id):
"""
Get all champion mastery entries.
:param string region: the region to execute this request on
:param string encrypted_summoner_id: Summoner ID associated with the player
:returns: List[ChampionMasteryDTO]: This object contains a list of Champion Mastery
information for player and champion combination.
"""
url, query = ChampionMasteryApiV4Urls.by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_summoner.__name__, region, url, query)
def scores_by_summoner(self, region, encrypted_summoner_id):
"""
Get a player's total champion mastery score, which is the sum of individual champion
mastery levels
:param string region: the region to execute this request on
:param string encrypted_summoner_id: Summoner ID associated with the player
:returns: int
"""
url, query = ChampionMasteryApiV4Urls.scores_by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.scores_by_summoner.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/SummonerApiV4.py
|
SummonerApiV4.by_account
|
python
|
def by_account(self, region, encrypted_account_id):
url, query = SummonerApiV4Urls.by_account(
region=region, encrypted_account_id=encrypted_account_id
)
return self._raw_request(self.by_account.__name__, region, url, query)
|
Get a summoner by account ID.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:returns: SummonerDTO: represents a summoner
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/SummonerApiV4.py#L20-L32
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class SummonerApiV4(NamedEndpoint):
"""
This class wraps the Summoner-v4 endpoint calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#summoner-v4 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new SummonerApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(SummonerApiV4, self).__init__(base_api, self.__class__.__name__)
def by_name(self, region, summoner_name):
"""
Get a summoner by summoner name
:param string region: The region to execute this request on
:param string summoner_name: Summoner Name
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_name(
region=region, summoner_name=summoner_name
)
return self._raw_request(self.by_name.__name__, region, url, query)
def by_puuid(self, region, encrypted_puuid):
"""
Get a summoner by PUUID.
:param string region: The region to execute this request on
:param string encrypted_puuid: PUUID
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_puuid(
region=region, encrypted_puuid=encrypted_puuid
)
return self._raw_request(self.by_puuid.__name__, region, url, query)
def by_id(self, region, encrypted_summoner_id):
"""
Get a summoner by summoner ID.
:param string region: The region to execute this request on
:param string encrypted_summoner_id: Summoner ID
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_id(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_id.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/SummonerApiV4.py
|
SummonerApiV4.by_name
|
python
|
def by_name(self, region, summoner_name):
url, query = SummonerApiV4Urls.by_name(
region=region, summoner_name=summoner_name
)
return self._raw_request(self.by_name.__name__, region, url, query)
|
Get a summoner by summoner name
:param string region: The region to execute this request on
:param string summoner_name: Summoner Name
:returns: SummonerDTO: represents a summoner
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/SummonerApiV4.py#L34-L46
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class SummonerApiV4(NamedEndpoint):
"""
This class wraps the Summoner-v4 endpoint calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#summoner-v4 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new SummonerApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(SummonerApiV4, self).__init__(base_api, self.__class__.__name__)
def by_account(self, region, encrypted_account_id):
"""
Get a summoner by account ID.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_account(
region=region, encrypted_account_id=encrypted_account_id
)
return self._raw_request(self.by_account.__name__, region, url, query)
def by_puuid(self, region, encrypted_puuid):
"""
Get a summoner by PUUID.
:param string region: The region to execute this request on
:param string encrypted_puuid: PUUID
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_puuid(
region=region, encrypted_puuid=encrypted_puuid
)
return self._raw_request(self.by_puuid.__name__, region, url, query)
def by_id(self, region, encrypted_summoner_id):
"""
Get a summoner by summoner ID.
:param string region: The region to execute this request on
:param string encrypted_summoner_id: Summoner ID
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_id(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_id.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/SummonerApiV4.py
|
SummonerApiV4.by_puuid
|
python
|
def by_puuid(self, region, encrypted_puuid):
url, query = SummonerApiV4Urls.by_puuid(
region=region, encrypted_puuid=encrypted_puuid
)
return self._raw_request(self.by_puuid.__name__, region, url, query)
|
Get a summoner by PUUID.
:param string region: The region to execute this request on
:param string encrypted_puuid: PUUID
:returns: SummonerDTO: represents a summoner
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/SummonerApiV4.py#L48-L60
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class SummonerApiV4(NamedEndpoint):
"""
This class wraps the Summoner-v4 endpoint calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#summoner-v4 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new SummonerApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(SummonerApiV4, self).__init__(base_api, self.__class__.__name__)
def by_account(self, region, encrypted_account_id):
"""
Get a summoner by account ID.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_account(
region=region, encrypted_account_id=encrypted_account_id
)
return self._raw_request(self.by_account.__name__, region, url, query)
def by_name(self, region, summoner_name):
"""
Get a summoner by summoner name
:param string region: The region to execute this request on
:param string summoner_name: Summoner Name
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_name(
region=region, summoner_name=summoner_name
)
return self._raw_request(self.by_name.__name__, region, url, query)
def by_id(self, region, encrypted_summoner_id):
"""
Get a summoner by summoner ID.
:param string region: The region to execute this request on
:param string encrypted_summoner_id: Summoner ID
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_id(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_id.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/SummonerApiV4.py
|
SummonerApiV4.by_id
|
python
|
def by_id(self, region, encrypted_summoner_id):
url, query = SummonerApiV4Urls.by_id(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_id.__name__, region, url, query)
|
Get a summoner by summoner ID.
:param string region: The region to execute this request on
:param string encrypted_summoner_id: Summoner ID
:returns: SummonerDTO: represents a summoner
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/SummonerApiV4.py#L62-L74
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class SummonerApiV4(NamedEndpoint):
"""
This class wraps the Summoner-v4 endpoint calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#summoner-v4 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new SummonerApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(SummonerApiV4, self).__init__(base_api, self.__class__.__name__)
def by_account(self, region, encrypted_account_id):
"""
Get a summoner by account ID.
:param string region: The region to execute this request on
:param string encrypted_account_id: The account ID.
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_account(
region=region, encrypted_account_id=encrypted_account_id
)
return self._raw_request(self.by_account.__name__, region, url, query)
def by_name(self, region, summoner_name):
"""
Get a summoner by summoner name
:param string region: The region to execute this request on
:param string summoner_name: Summoner Name
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_name(
region=region, summoner_name=summoner_name
)
return self._raw_request(self.by_name.__name__, region, url, query)
def by_puuid(self, region, encrypted_puuid):
"""
Get a summoner by PUUID.
:param string region: The region to execute this request on
:param string encrypted_puuid: PUUID
:returns: SummonerDTO: represents a summoner
"""
url, query = SummonerApiV4Urls.by_puuid(
region=region, encrypted_puuid=encrypted_puuid
)
return self._raw_request(self.by_puuid.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/SpectatorApiV4.py
|
SpectatorApiV4.featured_games
|
python
|
def featured_games(self, region):
url, query = SpectatorApiV4Urls.featured_games(region=region)
return self._raw_request(self.featured_games.__name__, region, url, query)
|
Get list of featured games.
:param string region: The region to execute this request on
:returns: FeaturedGames
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/SpectatorApiV4.py#L34-L43
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class SpectatorApiV4(NamedEndpoint):
"""
This class wraps the Spectator-v4 endpoint calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#spectator-v4 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new SpectatorApiV3 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(SpectatorApiV4, self).__init__(base_api, self.__class__.__name__)
def by_summoner(self, region, encrypted_summoner_id):
"""
Get current game information for the given summoner ID
:param string region: The region to execute this request on
:param string encrypted_summoner_id: The ID of the summoner.
:returns: CurrentGameInfo
"""
url, query = SpectatorApiV4Urls.by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_summoner.__name__, region, url, query)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/ThirdPartyCodeApiV4.py
|
ThirdPartyCodeApiV4.by_summoner
|
python
|
def by_summoner(self, region, encrypted_summoner_id):
url, query = ThirdPartyCodeApiV4Urls.by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_summoner.__name__, region, url, query)
|
FOR KR SUMMONERS, A 404 WILL ALWAYS BE RETURNED.
Valid codes must be no longer than 256 characters and only use
valid characters: 0-9, a-z, A-Z, and -
:param string region: the region to execute this request on
:param string encrypted_summoner_id: Summoner ID
:returns: string
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/ThirdPartyCodeApiV4.py#L21-L37
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class ThirdPartyCodeApiV4(NamedEndpoint):
"""
This class wraps the ThirdPartyCode-v4 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#third-party-code-v4 for more detailed
information
"""
def __init__(self, base_api):
"""
Initialize a new ThirdPartyCodeApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(ThirdPartyCodeApiV4, self).__init__(base_api, self.__class__.__name__)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/NamedEndpoint.py
|
NamedEndpoint._raw_request
|
python
|
def _raw_request(self, method_name, region, url, query_params):
return self._base_api.raw_request(
self._endpoint_name, method_name, region, url, query_params
)
|
Sends a request through the BaseApi instance provided, injecting the provided endpoint_name
into the method call, so the caller doesn't have to.
:param string method_name: The name of the calling method
:param string region: The region to execute this request on
:param string url: The full URL to the method being requested.
:param dict query_params: Query parameters to be provided in the HTTP request
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/NamedEndpoint.py#L18-L30
| null |
class NamedEndpoint(object):
"""
Helper class to inject endpoint name into calls to a BaseApi instance without
the child class explicitly adding the name every time.
"""
def __init__(self, base_api, endpoint_name):
"""
Initialize a new NamedEndpoint which uses the provided base_api and
injects the provided endpoint_name into calls to _request
:param BaseApi base_api: the root API object to use for making all requests.
:param string endpoint_name: the name of the child endpoint
"""
self._base_api = base_api
self._endpoint_name = endpoint_name
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/LeagueApiV4.py
|
LeagueApiV4.challenger_by_queue
|
python
|
def challenger_by_queue(self, region, queue):
url, query = LeagueApiV4Urls.challenger_by_queue(region=region, queue=queue)
return self._raw_request(self.challenger_by_queue.__name__, region, url, query)
|
Get the challenger league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the challenger players for
:returns: LeagueListDTO
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/LeagueApiV4.py#L20-L30
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class LeagueApiV4(NamedEndpoint):
"""
This class wraps the League-v4 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#league-v4/ for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new LeagueApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(LeagueApiV4, self).__init__(base_api, self.__class__.__name__)
def grandmaster_by_queue(self, region, queue):
"""
Get the grandmaster league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the grandmaster players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.grandmaster_by_queue(region=region, queue=queue)
return self._raw_request(self.grandmaster_by_queue.__name__, region, url, query)
def masters_by_queue(self, region, queue):
"""
Get the master league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the master players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.master_by_queue(region=region, queue=queue)
return self._raw_request(self.masters_by_queue.__name__, region, url, query)
def by_id(self, region, league_id):
"""
Get league with given ID, including inactive entries
:param string region: the region to execute this request on
:param string league_id: the league ID to query
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.by_id(region=region, league_id=league_id)
return self._raw_request(self.by_id.__name__, region, url, query)
def by_summoner(self, region, encrypted_summoner_id):
"""
Get league entries in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeagueEntryDTO]
"""
url, query = LeagueApiV4Urls.by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_summoner.__name__, region, url, query)
def entries(self, region, queue, tier, division):
"""
Get all the league entries
:param string region: the region to execute this request on
:param string queue: the queue to query, i.e. RANKED_SOLO_5x5
:param string tier: the tier to query, i.e. DIAMOND
:param string division: the division to query, i.e. III
:returns: Set[LeagueEntryDTO]
"""
url, query = LeagueApiV4Urls.entries(
region=region, queue=queue, tier=tier, division=division
)
return self._raw_request(self.entries.__name__, region, url, query)
def positions_by_summoner(self, region, encrypted_summoner_id):
"""
DEPRECATED
Get league positions in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeaguePositionDTO]
"""
url, query = LeagueApiV4Urls.positions_by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(
self.positions_by_summoner.__name__, region, url, query
)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/LeagueApiV4.py
|
LeagueApiV4.masters_by_queue
|
python
|
def masters_by_queue(self, region, queue):
url, query = LeagueApiV4Urls.master_by_queue(region=region, queue=queue)
return self._raw_request(self.masters_by_queue.__name__, region, url, query)
|
Get the master league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the master players for
:returns: LeagueListDTO
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/LeagueApiV4.py#L44-L54
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class LeagueApiV4(NamedEndpoint):
"""
This class wraps the League-v4 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#league-v4/ for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new LeagueApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(LeagueApiV4, self).__init__(base_api, self.__class__.__name__)
def challenger_by_queue(self, region, queue):
"""
Get the challenger league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the challenger players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.challenger_by_queue(region=region, queue=queue)
return self._raw_request(self.challenger_by_queue.__name__, region, url, query)
def grandmaster_by_queue(self, region, queue):
"""
Get the grandmaster league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the grandmaster players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.grandmaster_by_queue(region=region, queue=queue)
return self._raw_request(self.grandmaster_by_queue.__name__, region, url, query)
def by_id(self, region, league_id):
"""
Get league with given ID, including inactive entries
:param string region: the region to execute this request on
:param string league_id: the league ID to query
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.by_id(region=region, league_id=league_id)
return self._raw_request(self.by_id.__name__, region, url, query)
def by_summoner(self, region, encrypted_summoner_id):
"""
Get league entries in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeagueEntryDTO]
"""
url, query = LeagueApiV4Urls.by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_summoner.__name__, region, url, query)
def entries(self, region, queue, tier, division):
"""
Get all the league entries
:param string region: the region to execute this request on
:param string queue: the queue to query, i.e. RANKED_SOLO_5x5
:param string tier: the tier to query, i.e. DIAMOND
:param string division: the division to query, i.e. III
:returns: Set[LeagueEntryDTO]
"""
url, query = LeagueApiV4Urls.entries(
region=region, queue=queue, tier=tier, division=division
)
return self._raw_request(self.entries.__name__, region, url, query)
def positions_by_summoner(self, region, encrypted_summoner_id):
"""
DEPRECATED
Get league positions in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeaguePositionDTO]
"""
url, query = LeagueApiV4Urls.positions_by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(
self.positions_by_summoner.__name__, region, url, query
)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/LeagueApiV4.py
|
LeagueApiV4.by_id
|
python
|
def by_id(self, region, league_id):
url, query = LeagueApiV4Urls.by_id(region=region, league_id=league_id)
return self._raw_request(self.by_id.__name__, region, url, query)
|
Get league with given ID, including inactive entries
:param string region: the region to execute this request on
:param string league_id: the league ID to query
:returns: LeagueListDTO
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/LeagueApiV4.py#L56-L66
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class LeagueApiV4(NamedEndpoint):
"""
This class wraps the League-v4 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#league-v4/ for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new LeagueApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(LeagueApiV4, self).__init__(base_api, self.__class__.__name__)
def challenger_by_queue(self, region, queue):
"""
Get the challenger league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the challenger players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.challenger_by_queue(region=region, queue=queue)
return self._raw_request(self.challenger_by_queue.__name__, region, url, query)
def grandmaster_by_queue(self, region, queue):
"""
Get the grandmaster league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the grandmaster players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.grandmaster_by_queue(region=region, queue=queue)
return self._raw_request(self.grandmaster_by_queue.__name__, region, url, query)
def masters_by_queue(self, region, queue):
"""
Get the master league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the master players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.master_by_queue(region=region, queue=queue)
return self._raw_request(self.masters_by_queue.__name__, region, url, query)
def by_summoner(self, region, encrypted_summoner_id):
"""
Get league entries in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeagueEntryDTO]
"""
url, query = LeagueApiV4Urls.by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_summoner.__name__, region, url, query)
def entries(self, region, queue, tier, division):
"""
Get all the league entries
:param string region: the region to execute this request on
:param string queue: the queue to query, i.e. RANKED_SOLO_5x5
:param string tier: the tier to query, i.e. DIAMOND
:param string division: the division to query, i.e. III
:returns: Set[LeagueEntryDTO]
"""
url, query = LeagueApiV4Urls.entries(
region=region, queue=queue, tier=tier, division=division
)
return self._raw_request(self.entries.__name__, region, url, query)
def positions_by_summoner(self, region, encrypted_summoner_id):
"""
DEPRECATED
Get league positions in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeaguePositionDTO]
"""
url, query = LeagueApiV4Urls.positions_by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(
self.positions_by_summoner.__name__, region, url, query
)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/LeagueApiV4.py
|
LeagueApiV4.entries
|
python
|
def entries(self, region, queue, tier, division):
url, query = LeagueApiV4Urls.entries(
region=region, queue=queue, tier=tier, division=division
)
return self._raw_request(self.entries.__name__, region, url, query)
|
Get all the league entries
:param string region: the region to execute this request on
:param string queue: the queue to query, i.e. RANKED_SOLO_5x5
:param string tier: the tier to query, i.e. DIAMOND
:param string division: the division to query, i.e. III
:returns: Set[LeagueEntryDTO]
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/LeagueApiV4.py#L82-L96
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class LeagueApiV4(NamedEndpoint):
"""
This class wraps the League-v4 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#league-v4/ for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new LeagueApiV4 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(LeagueApiV4, self).__init__(base_api, self.__class__.__name__)
def challenger_by_queue(self, region, queue):
"""
Get the challenger league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the challenger players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.challenger_by_queue(region=region, queue=queue)
return self._raw_request(self.challenger_by_queue.__name__, region, url, query)
def grandmaster_by_queue(self, region, queue):
"""
Get the grandmaster league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the grandmaster players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.grandmaster_by_queue(region=region, queue=queue)
return self._raw_request(self.grandmaster_by_queue.__name__, region, url, query)
def masters_by_queue(self, region, queue):
"""
Get the master league for a given queue.
:param string region: the region to execute this request on
:param string queue: the queue to get the master players for
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.master_by_queue(region=region, queue=queue)
return self._raw_request(self.masters_by_queue.__name__, region, url, query)
def by_id(self, region, league_id):
"""
Get league with given ID, including inactive entries
:param string region: the region to execute this request on
:param string league_id: the league ID to query
:returns: LeagueListDTO
"""
url, query = LeagueApiV4Urls.by_id(region=region, league_id=league_id)
return self._raw_request(self.by_id.__name__, region, url, query)
def by_summoner(self, region, encrypted_summoner_id):
"""
Get league entries in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeagueEntryDTO]
"""
url, query = LeagueApiV4Urls.by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(self.by_summoner.__name__, region, url, query)
def positions_by_summoner(self, region, encrypted_summoner_id):
"""
DEPRECATED
Get league positions in all queues for a given summoner ID
:param string region: the region to execute this request on
:param string encrypted_summoner_id: the summoner ID to query
:returns: Set[LeaguePositionDTO]
"""
url, query = LeagueApiV4Urls.positions_by_summoner(
region=region, encrypted_summoner_id=encrypted_summoner_id
)
return self._raw_request(
self.positions_by_summoner.__name__, region, url, query
)
|
pseudonym117/Riot-Watcher
|
src/riotwatcher/_apis/ChampionApiV3.py
|
ChampionApiV3.rotations
|
python
|
def rotations(self, region):
url, query = ChampionApiV3Urls.rotations(region=region)
return self._raw_request(self.rotations.__name__, region, url, query)
|
Returns champion rotations, including free-to-play and low-level free-to-play rotations.
:returns: ChampionInfo
|
train
|
https://github.com/pseudonym117/Riot-Watcher/blob/21ab12453a0d824d67e30f5514d02a5c5a411dea/src/riotwatcher/_apis/ChampionApiV3.py#L20-L27
|
[
"def _raw_request(self, method_name, region, url, query_params):\n \"\"\"\n Sends a request through the BaseApi instance provided, injecting the provided endpoint_name\n into the method call, so the caller doesn't have to.\n\n :param string method_name: The name of the calling method\n :param string region: The region to execute this request on\n :param string url: The full URL to the method being requested.\n :param dict query_params: Query parameters to be provided in the HTTP request\n \"\"\"\n return self._base_api.raw_request(\n self._endpoint_name, method_name, region, url, query_params\n )\n"
] |
class ChampionApiV3(NamedEndpoint):
"""
This class wraps the Champion-v3 Api calls provided by the Riot API.
See https://developer.riotgames.com/api-methods/#champion-v3 for more detailed information
"""
def __init__(self, base_api):
"""
Initialize a new ChampionApiV3 which uses the provided base_api
:param BaseApi base_api: the root API object to use for making all requests.
"""
super(ChampionApiV3, self).__init__(base_api, self.__class__.__name__)
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
API._make_request
|
python
|
def _make_request(self, path, method='GET', params_=None):
uri = self.api_root + path
if params_:
params_['text_format'] = self.response_format
else:
params_ = {'text_format': self.response_format}
# Make the request
response = None
try:
response = self._session.request(method, uri,
timeout=self.timeout,
params=params_)
except Timeout as e:
print("Timeout raised and caught:\n{e}".format(e=e))
# Enforce rate limiting
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
|
Make a request to the API
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L50-L69
| null |
class API(object):
"""Genius API"""
# Create a persistent requests connection
_session = requests.Session()
_session.headers = {'application': 'LyricsGenius',
'User-Agent': 'https://github.com/johnwmillr/LyricsGenius'}
_SLEEP_MIN = 0.2 # Enforce minimum wait time between API calls (seconds)
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5):
""" Genius API Constructor
:param client_access_token: API key provided by Genius
:param response_format: API response format (dom, plain, html)
:param timeout: time before quitting on response (seconds)
:param sleep_time: time to wait between requests
"""
self._ACCESS_TOKEN = client_access_token
self._session.headers['authorization'] = 'Bearer ' + self._ACCESS_TOKEN
self.response_format = response_format.lower()
self.api_root = 'https://api.genius.com/'
self.timeout = timeout
self.sleep_time = sleep_time
def get_song(self, id_):
"""Data for a specific song."""
endpoint = "songs/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist(self, id_):
"""Data for a specific artist."""
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist_songs(self, id_, sort='title', per_page=20, page=1):
"""Documents (songs) for the artist specified."""
endpoint = "artists/{id}/songs".format(id=id_)
params = {'sort': sort, 'per_page': per_page, 'page': page}
return self._make_request(endpoint, params_=params)
def search_genius(self, search_term):
"""Search documents hosted on Genius."""
endpoint = "search/"
params = {'q': search_term}
return self._make_request(endpoint, params_=params)
def search_genius_web(self, search_term, per_page=5):
"""Use the web-version of Genius search"""
endpoint = "search/multi?"
params = {'per_page': per_page, 'q': search_term}
# This endpoint is not part of the API, requires different formatting
url = "https://genius.com/api/" + endpoint + urlencode(params)
response = requests.get(url, timeout=self.timeout)
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_annotation(self, id_):
"""Data for a specific annotation."""
endpoint = "annotations/{id}".format(id=id_)
return self._make_request(endpoint)
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
API.get_song
|
python
|
def get_song(self, id_):
endpoint = "songs/{id}".format(id=id_)
return self._make_request(endpoint)
|
Data for a specific song.
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L71-L74
|
[
"def _make_request(self, path, method='GET', params_=None):\n \"\"\"Make a request to the API\"\"\"\n uri = self.api_root + path\n if params_:\n params_['text_format'] = self.response_format\n else:\n params_ = {'text_format': self.response_format}\n\n # Make the request\n response = None\n try:\n response = self._session.request(method, uri,\n timeout=self.timeout,\n params=params_)\n except Timeout as e:\n print(\"Timeout raised and caught:\\n{e}\".format(e=e))\n\n # Enforce rate limiting\n time.sleep(max(self._SLEEP_MIN, self.sleep_time))\n return response.json()['response'] if response else None\n"
] |
class API(object):
"""Genius API"""
# Create a persistent requests connection
_session = requests.Session()
_session.headers = {'application': 'LyricsGenius',
'User-Agent': 'https://github.com/johnwmillr/LyricsGenius'}
_SLEEP_MIN = 0.2 # Enforce minimum wait time between API calls (seconds)
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5):
""" Genius API Constructor
:param client_access_token: API key provided by Genius
:param response_format: API response format (dom, plain, html)
:param timeout: time before quitting on response (seconds)
:param sleep_time: time to wait between requests
"""
self._ACCESS_TOKEN = client_access_token
self._session.headers['authorization'] = 'Bearer ' + self._ACCESS_TOKEN
self.response_format = response_format.lower()
self.api_root = 'https://api.genius.com/'
self.timeout = timeout
self.sleep_time = sleep_time
def _make_request(self, path, method='GET', params_=None):
"""Make a request to the API"""
uri = self.api_root + path
if params_:
params_['text_format'] = self.response_format
else:
params_ = {'text_format': self.response_format}
# Make the request
response = None
try:
response = self._session.request(method, uri,
timeout=self.timeout,
params=params_)
except Timeout as e:
print("Timeout raised and caught:\n{e}".format(e=e))
# Enforce rate limiting
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_artist(self, id_):
"""Data for a specific artist."""
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist_songs(self, id_, sort='title', per_page=20, page=1):
"""Documents (songs) for the artist specified."""
endpoint = "artists/{id}/songs".format(id=id_)
params = {'sort': sort, 'per_page': per_page, 'page': page}
return self._make_request(endpoint, params_=params)
def search_genius(self, search_term):
"""Search documents hosted on Genius."""
endpoint = "search/"
params = {'q': search_term}
return self._make_request(endpoint, params_=params)
def search_genius_web(self, search_term, per_page=5):
"""Use the web-version of Genius search"""
endpoint = "search/multi?"
params = {'per_page': per_page, 'q': search_term}
# This endpoint is not part of the API, requires different formatting
url = "https://genius.com/api/" + endpoint + urlencode(params)
response = requests.get(url, timeout=self.timeout)
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_annotation(self, id_):
"""Data for a specific annotation."""
endpoint = "annotations/{id}".format(id=id_)
return self._make_request(endpoint)
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
API.get_artist
|
python
|
def get_artist(self, id_):
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint)
|
Data for a specific artist.
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L76-L79
|
[
"def _make_request(self, path, method='GET', params_=None):\n \"\"\"Make a request to the API\"\"\"\n uri = self.api_root + path\n if params_:\n params_['text_format'] = self.response_format\n else:\n params_ = {'text_format': self.response_format}\n\n # Make the request\n response = None\n try:\n response = self._session.request(method, uri,\n timeout=self.timeout,\n params=params_)\n except Timeout as e:\n print(\"Timeout raised and caught:\\n{e}\".format(e=e))\n\n # Enforce rate limiting\n time.sleep(max(self._SLEEP_MIN, self.sleep_time))\n return response.json()['response'] if response else None\n"
] |
class API(object):
"""Genius API"""
# Create a persistent requests connection
_session = requests.Session()
_session.headers = {'application': 'LyricsGenius',
'User-Agent': 'https://github.com/johnwmillr/LyricsGenius'}
_SLEEP_MIN = 0.2 # Enforce minimum wait time between API calls (seconds)
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5):
""" Genius API Constructor
:param client_access_token: API key provided by Genius
:param response_format: API response format (dom, plain, html)
:param timeout: time before quitting on response (seconds)
:param sleep_time: time to wait between requests
"""
self._ACCESS_TOKEN = client_access_token
self._session.headers['authorization'] = 'Bearer ' + self._ACCESS_TOKEN
self.response_format = response_format.lower()
self.api_root = 'https://api.genius.com/'
self.timeout = timeout
self.sleep_time = sleep_time
def _make_request(self, path, method='GET', params_=None):
"""Make a request to the API"""
uri = self.api_root + path
if params_:
params_['text_format'] = self.response_format
else:
params_ = {'text_format': self.response_format}
# Make the request
response = None
try:
response = self._session.request(method, uri,
timeout=self.timeout,
params=params_)
except Timeout as e:
print("Timeout raised and caught:\n{e}".format(e=e))
# Enforce rate limiting
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_song(self, id_):
"""Data for a specific song."""
endpoint = "songs/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist_songs(self, id_, sort='title', per_page=20, page=1):
"""Documents (songs) for the artist specified."""
endpoint = "artists/{id}/songs".format(id=id_)
params = {'sort': sort, 'per_page': per_page, 'page': page}
return self._make_request(endpoint, params_=params)
def search_genius(self, search_term):
"""Search documents hosted on Genius."""
endpoint = "search/"
params = {'q': search_term}
return self._make_request(endpoint, params_=params)
def search_genius_web(self, search_term, per_page=5):
"""Use the web-version of Genius search"""
endpoint = "search/multi?"
params = {'per_page': per_page, 'q': search_term}
# This endpoint is not part of the API, requires different formatting
url = "https://genius.com/api/" + endpoint + urlencode(params)
response = requests.get(url, timeout=self.timeout)
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_annotation(self, id_):
"""Data for a specific annotation."""
endpoint = "annotations/{id}".format(id=id_)
return self._make_request(endpoint)
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
API.get_artist_songs
|
python
|
def get_artist_songs(self, id_, sort='title', per_page=20, page=1):
endpoint = "artists/{id}/songs".format(id=id_)
params = {'sort': sort, 'per_page': per_page, 'page': page}
return self._make_request(endpoint, params_=params)
|
Documents (songs) for the artist specified.
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L81-L85
|
[
"def _make_request(self, path, method='GET', params_=None):\n \"\"\"Make a request to the API\"\"\"\n uri = self.api_root + path\n if params_:\n params_['text_format'] = self.response_format\n else:\n params_ = {'text_format': self.response_format}\n\n # Make the request\n response = None\n try:\n response = self._session.request(method, uri,\n timeout=self.timeout,\n params=params_)\n except Timeout as e:\n print(\"Timeout raised and caught:\\n{e}\".format(e=e))\n\n # Enforce rate limiting\n time.sleep(max(self._SLEEP_MIN, self.sleep_time))\n return response.json()['response'] if response else None\n"
] |
class API(object):
"""Genius API"""
# Create a persistent requests connection
_session = requests.Session()
_session.headers = {'application': 'LyricsGenius',
'User-Agent': 'https://github.com/johnwmillr/LyricsGenius'}
_SLEEP_MIN = 0.2 # Enforce minimum wait time between API calls (seconds)
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5):
""" Genius API Constructor
:param client_access_token: API key provided by Genius
:param response_format: API response format (dom, plain, html)
:param timeout: time before quitting on response (seconds)
:param sleep_time: time to wait between requests
"""
self._ACCESS_TOKEN = client_access_token
self._session.headers['authorization'] = 'Bearer ' + self._ACCESS_TOKEN
self.response_format = response_format.lower()
self.api_root = 'https://api.genius.com/'
self.timeout = timeout
self.sleep_time = sleep_time
def _make_request(self, path, method='GET', params_=None):
"""Make a request to the API"""
uri = self.api_root + path
if params_:
params_['text_format'] = self.response_format
else:
params_ = {'text_format': self.response_format}
# Make the request
response = None
try:
response = self._session.request(method, uri,
timeout=self.timeout,
params=params_)
except Timeout as e:
print("Timeout raised and caught:\n{e}".format(e=e))
# Enforce rate limiting
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_song(self, id_):
"""Data for a specific song."""
endpoint = "songs/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist(self, id_):
"""Data for a specific artist."""
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint)
def search_genius(self, search_term):
"""Search documents hosted on Genius."""
endpoint = "search/"
params = {'q': search_term}
return self._make_request(endpoint, params_=params)
def search_genius_web(self, search_term, per_page=5):
"""Use the web-version of Genius search"""
endpoint = "search/multi?"
params = {'per_page': per_page, 'q': search_term}
# This endpoint is not part of the API, requires different formatting
url = "https://genius.com/api/" + endpoint + urlencode(params)
response = requests.get(url, timeout=self.timeout)
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_annotation(self, id_):
"""Data for a specific annotation."""
endpoint = "annotations/{id}".format(id=id_)
return self._make_request(endpoint)
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
API.search_genius
|
python
|
def search_genius(self, search_term):
endpoint = "search/"
params = {'q': search_term}
return self._make_request(endpoint, params_=params)
|
Search documents hosted on Genius.
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L87-L91
|
[
"def _make_request(self, path, method='GET', params_=None):\n \"\"\"Make a request to the API\"\"\"\n uri = self.api_root + path\n if params_:\n params_['text_format'] = self.response_format\n else:\n params_ = {'text_format': self.response_format}\n\n # Make the request\n response = None\n try:\n response = self._session.request(method, uri,\n timeout=self.timeout,\n params=params_)\n except Timeout as e:\n print(\"Timeout raised and caught:\\n{e}\".format(e=e))\n\n # Enforce rate limiting\n time.sleep(max(self._SLEEP_MIN, self.sleep_time))\n return response.json()['response'] if response else None\n"
] |
class API(object):
"""Genius API"""
# Create a persistent requests connection
_session = requests.Session()
_session.headers = {'application': 'LyricsGenius',
'User-Agent': 'https://github.com/johnwmillr/LyricsGenius'}
_SLEEP_MIN = 0.2 # Enforce minimum wait time between API calls (seconds)
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5):
""" Genius API Constructor
:param client_access_token: API key provided by Genius
:param response_format: API response format (dom, plain, html)
:param timeout: time before quitting on response (seconds)
:param sleep_time: time to wait between requests
"""
self._ACCESS_TOKEN = client_access_token
self._session.headers['authorization'] = 'Bearer ' + self._ACCESS_TOKEN
self.response_format = response_format.lower()
self.api_root = 'https://api.genius.com/'
self.timeout = timeout
self.sleep_time = sleep_time
def _make_request(self, path, method='GET', params_=None):
"""Make a request to the API"""
uri = self.api_root + path
if params_:
params_['text_format'] = self.response_format
else:
params_ = {'text_format': self.response_format}
# Make the request
response = None
try:
response = self._session.request(method, uri,
timeout=self.timeout,
params=params_)
except Timeout as e:
print("Timeout raised and caught:\n{e}".format(e=e))
# Enforce rate limiting
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_song(self, id_):
"""Data for a specific song."""
endpoint = "songs/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist(self, id_):
"""Data for a specific artist."""
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist_songs(self, id_, sort='title', per_page=20, page=1):
"""Documents (songs) for the artist specified."""
endpoint = "artists/{id}/songs".format(id=id_)
params = {'sort': sort, 'per_page': per_page, 'page': page}
return self._make_request(endpoint, params_=params)
def search_genius_web(self, search_term, per_page=5):
"""Use the web-version of Genius search"""
endpoint = "search/multi?"
params = {'per_page': per_page, 'q': search_term}
# This endpoint is not part of the API, requires different formatting
url = "https://genius.com/api/" + endpoint + urlencode(params)
response = requests.get(url, timeout=self.timeout)
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_annotation(self, id_):
"""Data for a specific annotation."""
endpoint = "annotations/{id}".format(id=id_)
return self._make_request(endpoint)
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
API.search_genius_web
|
python
|
def search_genius_web(self, search_term, per_page=5):
endpoint = "search/multi?"
params = {'per_page': per_page, 'q': search_term}
# This endpoint is not part of the API, requires different formatting
url = "https://genius.com/api/" + endpoint + urlencode(params)
response = requests.get(url, timeout=self.timeout)
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
|
Use the web-version of Genius search
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L93-L102
| null |
class API(object):
"""Genius API"""
# Create a persistent requests connection
_session = requests.Session()
_session.headers = {'application': 'LyricsGenius',
'User-Agent': 'https://github.com/johnwmillr/LyricsGenius'}
_SLEEP_MIN = 0.2 # Enforce minimum wait time between API calls (seconds)
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5):
""" Genius API Constructor
:param client_access_token: API key provided by Genius
:param response_format: API response format (dom, plain, html)
:param timeout: time before quitting on response (seconds)
:param sleep_time: time to wait between requests
"""
self._ACCESS_TOKEN = client_access_token
self._session.headers['authorization'] = 'Bearer ' + self._ACCESS_TOKEN
self.response_format = response_format.lower()
self.api_root = 'https://api.genius.com/'
self.timeout = timeout
self.sleep_time = sleep_time
def _make_request(self, path, method='GET', params_=None):
"""Make a request to the API"""
uri = self.api_root + path
if params_:
params_['text_format'] = self.response_format
else:
params_ = {'text_format': self.response_format}
# Make the request
response = None
try:
response = self._session.request(method, uri,
timeout=self.timeout,
params=params_)
except Timeout as e:
print("Timeout raised and caught:\n{e}".format(e=e))
# Enforce rate limiting
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_song(self, id_):
"""Data for a specific song."""
endpoint = "songs/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist(self, id_):
"""Data for a specific artist."""
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist_songs(self, id_, sort='title', per_page=20, page=1):
"""Documents (songs) for the artist specified."""
endpoint = "artists/{id}/songs".format(id=id_)
params = {'sort': sort, 'per_page': per_page, 'page': page}
return self._make_request(endpoint, params_=params)
def search_genius(self, search_term):
"""Search documents hosted on Genius."""
endpoint = "search/"
params = {'q': search_term}
return self._make_request(endpoint, params_=params)
def get_annotation(self, id_):
"""Data for a specific annotation."""
endpoint = "annotations/{id}".format(id=id_)
return self._make_request(endpoint)
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
API.get_annotation
|
python
|
def get_annotation(self, id_):
endpoint = "annotations/{id}".format(id=id_)
return self._make_request(endpoint)
|
Data for a specific annotation.
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L104-L107
|
[
"def _make_request(self, path, method='GET', params_=None):\n \"\"\"Make a request to the API\"\"\"\n uri = self.api_root + path\n if params_:\n params_['text_format'] = self.response_format\n else:\n params_ = {'text_format': self.response_format}\n\n # Make the request\n response = None\n try:\n response = self._session.request(method, uri,\n timeout=self.timeout,\n params=params_)\n except Timeout as e:\n print(\"Timeout raised and caught:\\n{e}\".format(e=e))\n\n # Enforce rate limiting\n time.sleep(max(self._SLEEP_MIN, self.sleep_time))\n return response.json()['response'] if response else None\n"
] |
class API(object):
"""Genius API"""
# Create a persistent requests connection
_session = requests.Session()
_session.headers = {'application': 'LyricsGenius',
'User-Agent': 'https://github.com/johnwmillr/LyricsGenius'}
_SLEEP_MIN = 0.2 # Enforce minimum wait time between API calls (seconds)
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5):
""" Genius API Constructor
:param client_access_token: API key provided by Genius
:param response_format: API response format (dom, plain, html)
:param timeout: time before quitting on response (seconds)
:param sleep_time: time to wait between requests
"""
self._ACCESS_TOKEN = client_access_token
self._session.headers['authorization'] = 'Bearer ' + self._ACCESS_TOKEN
self.response_format = response_format.lower()
self.api_root = 'https://api.genius.com/'
self.timeout = timeout
self.sleep_time = sleep_time
def _make_request(self, path, method='GET', params_=None):
"""Make a request to the API"""
uri = self.api_root + path
if params_:
params_['text_format'] = self.response_format
else:
params_ = {'text_format': self.response_format}
# Make the request
response = None
try:
response = self._session.request(method, uri,
timeout=self.timeout,
params=params_)
except Timeout as e:
print("Timeout raised and caught:\n{e}".format(e=e))
# Enforce rate limiting
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
def get_song(self, id_):
"""Data for a specific song."""
endpoint = "songs/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist(self, id_):
"""Data for a specific artist."""
endpoint = "artists/{id}".format(id=id_)
return self._make_request(endpoint)
def get_artist_songs(self, id_, sort='title', per_page=20, page=1):
"""Documents (songs) for the artist specified."""
endpoint = "artists/{id}/songs".format(id=id_)
params = {'sort': sort, 'per_page': per_page, 'page': page}
return self._make_request(endpoint, params_=params)
def search_genius(self, search_term):
"""Search documents hosted on Genius."""
endpoint = "search/"
params = {'q': search_term}
return self._make_request(endpoint, params_=params)
def search_genius_web(self, search_term, per_page=5):
"""Use the web-version of Genius search"""
endpoint = "search/multi?"
params = {'per_page': per_page, 'q': search_term}
# This endpoint is not part of the API, requires different formatting
url = "https://genius.com/api/" + endpoint + urlencode(params)
response = requests.get(url, timeout=self.timeout)
time.sleep(max(self._SLEEP_MIN, self.sleep_time))
return response.json()['response'] if response else None
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius._scrape_song_lyrics_from_url
|
python
|
def _scrape_song_lyrics_from_url(self, url):
page = requests.get(url)
if page.status_code == 404:
return None
# Scrape the song lyrics from the HTML
html = BeautifulSoup(page.text, "html.parser")
div = html.find("div", class_="lyrics")
if not div:
return None # Sometimes the lyrics section isn't found
# Scrape lyrics if proper section was found on page
lyrics = div.get_text()
if self.remove_section_headers: # Remove [Verse], [Bridge], etc.
lyrics = re.sub('(\[.*?\])*', '', lyrics)
lyrics = re.sub('\n{2}', '\n', lyrics) # Gaps between verses
return lyrics.strip("\n")
|
Use BeautifulSoup to scrape song info off of a Genius song URL
:param url: URL for the web page to scrape lyrics from
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L134-L153
| null |
class Genius(API):
"""User-level interface with the Genius.com API."""
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5,
verbose=True, remove_section_headers=False,
skip_non_songs=True, excluded_terms=[],
replace_default_terms=False):
""" Genius Client Constructor
:param verbose: Turn printed messages on or off (bool)
:param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
:param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
:param excluded_terms: (list) extra terms for flagging results as non-lyrics
:param replace_default_terms: if True, replaces default excluded terms with user's
"""
super().__init__(client_access_token, response_format, timeout, sleep_time)
self.verbose = verbose
self.remove_section_headers = remove_section_headers
self.skip_non_songs = skip_non_songs
self.excluded_terms = excluded_terms
self.replace_default_terms = replace_default_terms
def _clean_str(self, s):
""" Returns a lowercase string with punctuation and bad chars removed
:param s: string to clean
"""
return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()
def _result_is_lyrics(self, song_title):
""" Returns False if result from Genius is not actually song lyrics
Set the `excluded_terms` and `replace_default_terms` as
instance variables within the Genius class.
"""
default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
'booklet', 'credits', 'interview', 'skit',
'instrumental', 'setlist']
if self.excluded_terms:
if self.replace_default_terms:
default_terms = self.excluded_terms
else:
default_terms.extend(self.excluded_terms)
expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
regex = re.compile(expression, re.IGNORECASE)
return not regex.search(self._clean_str(song_title))
def _get_item_from_search_response(self, response, type_):
""" Returns either a Song or Artist result from search_genius_web """
sections = sorted(response['sections'],
key=lambda sect: sect['type'] == type_,
reverse=True)
for section in sections:
hits = [hit for hit in section['hits'] if hit['type'] == type_]
if hits:
return hits[0]['result']
def _result_is_match(self, result, title, artist=None):
""" Returns True if search result matches searched song """
result_title = self._clean_str(result['title'])
title_is_match = result_title == self._clean_str(title)
if not artist:
return title_is_match
result_artist = self._clean_str(result['primary_artist']['name'])
return title_is_match and result_artist == self._clean_str(artist)
def search_song(self, title, artist="", get_full_info=True):
""" Search Genius.com for lyrics to a specific song
:param title: Song title to search for
:param artist: Name of the artist
:param get_full_info: Get full info for each song (slower)
"""
# Search the Genius API for the specified song
if self.verbose:
if artist:
print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
else:
print('Searching for "{s}"...'.format(s=title))
search_term = "{s} {a}".format(s=title, a=artist).strip()
response = self.search_genius_web(search_term)
# Otherwise, move forward with processing the search results
result = self._get_item_from_search_response(response, type_="song")
# Exit search if there were no results returned from API
if not result:
if self.verbose:
print("No results found for: '{s}'".format(s=search_term))
return None
# Reject non-songs (Liner notes, track lists, etc.)
valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
if not valid:
if self.verbose:
print('Specified song does not contain lyrics. Rejecting.')
return None
# Download full song info (an API call) unless told not to by user
song_info = result.copy()
if get_full_info:
song_info.update(self.get_song(result['id'])['song'])
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
# Skip results when URL is a 404 or lyrics are missing
if not lyrics:
if self.verbose:
print('Specified song does not have a valid URL with lyrics. Rejecting.')
return None
# Return a Song object with lyrics if we've made it this far
song = Song(song_info, lyrics)
if self.verbose:
print('Done.')
return song
def search_artist(self, artist_name, max_songs=None,
sort='popularity', per_page=20, get_full_info=True):
"""Search Genius.com for songs by the specified artist.
Returns an Artist object containing artist's songs.
:param artist_name: Name of the artist to search for
:param max_songs: Maximum number of songs to search for
:param sort: Sort by 'title' or 'popularity'
:param per_page: Number of results to return per search page
:param get_full_info: Get full info for each song (slower)
"""
if self.verbose:
print('Searching for songs by {0}...\n'.format(artist_name))
# Perform a Genius API search for the artist
found_artist = None
response = self.search_genius_web(artist_name)
found_artist = self._get_item_from_search_response(response, type_="artist")
# Exit the search if we couldn't find an artist by the given name
if not found_artist:
if self.verbose:
print("No results found for '{a}'.".format(a=artist_name))
return None
# Assume the top search result is the intended artist
artist_id = found_artist['id']
artist_info = self.get_artist(artist_id)
found_name = artist_info['artist']['name']
if found_name != artist_name:
if self.verbose:
print("Changing artist name to '{a}'".format(a=found_name))
artist_name = found_name
# Create the Artist object
artist = Artist(artist_info)
# Download each song by artist, stored as Song objects in Artist object
page = 1
reached_max_songs = False
while not reached_max_songs:
songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
# Loop through each song on page of search results
for song_info in songs_on_page['songs']:
# Check if song is valid (e.g. has title, contains lyrics)
has_title = ('title' in song_info)
has_lyrics = self._result_is_lyrics(song_info['title'])
valid = has_title and (has_lyrics or (not self.skip_non_songs))
# Reject non-song results (e.g. Linear Notes, Tracklists, etc.)
if not valid:
if self.verbose:
s = song_info['title'] if has_title else "MISSING TITLE"
print('"{s}" is not valid. Skipping.'.format(s=s))
continue
# Create the Song object from lyrics and metadata
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
if get_full_info:
info = self.get_song(song_info['id'])
else:
info = {'song': song_info}
song = Song(info, lyrics)
# Attempt to add the Song to the Artist
result = artist.add_song(song, verbose=False)
if result == 0 and self.verbose:
print('Song {n}: "{t}"'.format(n=artist.num_songs,
t=song.title))
# Exit search if the max number of songs has been met
reached_max_songs = max_songs and artist.num_songs >= max_songs
if reached_max_songs:
if self.verbose:
print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
break
# Move on to next page of search results
page = songs_on_page['next_page']
if page is None:
break # Exit search when last page is reached
if self.verbose:
print('Done. Found {n} songs.'.format(n=artist.num_songs))
return artist
def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
"""Save lyrics from multiple Artist objects as JSON object
:param artists: List of Artist objects to save lyrics from
:param filename: Name of output file (json)
:param overwrite: Overwrites preexisting file if True
"""
if isinstance(artists, Artist):
artists = [artists]
# Create a temporary directory for lyrics
start = time.time()
tmp_dir = 'tmp_lyrics'
if not os.path.isdir(tmp_dir):
os.mkdir(tmp_dir)
count = 0
else:
count = len(os.listdir(tmp_dir))
# Check if file already exists
if os.path.isfile(filename + ".json") and not overwrite:
msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
if input(msg).lower() != "y":
print("Leaving file in place. Exiting.")
os.rmdir(tmp_dir)
return
# Extract each artist's lyrics in json format
all_lyrics = {'artists': []}
for n, artist in enumerate(artists):
if isinstance(artist, Artist):
all_lyrics['artists'].append({})
f = "tmp_{n}_{a}".format(n=count + n,
a=artist.name.replace(" ", ""))
tmp_file = os.path.join(tmp_dir, f)
if self.verbose:
print(tmp_file)
all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)
# Save all of the lyrics
with open(filename + '.json', 'w') as outfile:
json.dump(all_lyrics, outfile)
# Delete the temporary directory
shutil.rmtree(tmp_dir)
elapsed = (time.time() - start) / 60 / 60
print("Time elapsed: {t} hours".format(t=elapsed))
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius._clean_str
|
python
|
def _clean_str(self, s):
return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()
|
Returns a lowercase string with punctuation and bad chars removed
:param s: string to clean
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L155-L159
| null |
class Genius(API):
"""User-level interface with the Genius.com API."""
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5,
verbose=True, remove_section_headers=False,
skip_non_songs=True, excluded_terms=[],
replace_default_terms=False):
""" Genius Client Constructor
:param verbose: Turn printed messages on or off (bool)
:param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
:param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
:param excluded_terms: (list) extra terms for flagging results as non-lyrics
:param replace_default_terms: if True, replaces default excluded terms with user's
"""
super().__init__(client_access_token, response_format, timeout, sleep_time)
self.verbose = verbose
self.remove_section_headers = remove_section_headers
self.skip_non_songs = skip_non_songs
self.excluded_terms = excluded_terms
self.replace_default_terms = replace_default_terms
def _scrape_song_lyrics_from_url(self, url):
""" Use BeautifulSoup to scrape song info off of a Genius song URL
:param url: URL for the web page to scrape lyrics from
"""
page = requests.get(url)
if page.status_code == 404:
return None
# Scrape the song lyrics from the HTML
html = BeautifulSoup(page.text, "html.parser")
div = html.find("div", class_="lyrics")
if not div:
return None # Sometimes the lyrics section isn't found
# Scrape lyrics if proper section was found on page
lyrics = div.get_text()
if self.remove_section_headers: # Remove [Verse], [Bridge], etc.
lyrics = re.sub('(\[.*?\])*', '', lyrics)
lyrics = re.sub('\n{2}', '\n', lyrics) # Gaps between verses
return lyrics.strip("\n")
def _result_is_lyrics(self, song_title):
""" Returns False if result from Genius is not actually song lyrics
Set the `excluded_terms` and `replace_default_terms` as
instance variables within the Genius class.
"""
default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
'booklet', 'credits', 'interview', 'skit',
'instrumental', 'setlist']
if self.excluded_terms:
if self.replace_default_terms:
default_terms = self.excluded_terms
else:
default_terms.extend(self.excluded_terms)
expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
regex = re.compile(expression, re.IGNORECASE)
return not regex.search(self._clean_str(song_title))
def _get_item_from_search_response(self, response, type_):
""" Returns either a Song or Artist result from search_genius_web """
sections = sorted(response['sections'],
key=lambda sect: sect['type'] == type_,
reverse=True)
for section in sections:
hits = [hit for hit in section['hits'] if hit['type'] == type_]
if hits:
return hits[0]['result']
def _result_is_match(self, result, title, artist=None):
""" Returns True if search result matches searched song """
result_title = self._clean_str(result['title'])
title_is_match = result_title == self._clean_str(title)
if not artist:
return title_is_match
result_artist = self._clean_str(result['primary_artist']['name'])
return title_is_match and result_artist == self._clean_str(artist)
def search_song(self, title, artist="", get_full_info=True):
""" Search Genius.com for lyrics to a specific song
:param title: Song title to search for
:param artist: Name of the artist
:param get_full_info: Get full info for each song (slower)
"""
# Search the Genius API for the specified song
if self.verbose:
if artist:
print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
else:
print('Searching for "{s}"...'.format(s=title))
search_term = "{s} {a}".format(s=title, a=artist).strip()
response = self.search_genius_web(search_term)
# Otherwise, move forward with processing the search results
result = self._get_item_from_search_response(response, type_="song")
# Exit search if there were no results returned from API
if not result:
if self.verbose:
print("No results found for: '{s}'".format(s=search_term))
return None
# Reject non-songs (Liner notes, track lists, etc.)
valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
if not valid:
if self.verbose:
print('Specified song does not contain lyrics. Rejecting.')
return None
# Download full song info (an API call) unless told not to by user
song_info = result.copy()
if get_full_info:
song_info.update(self.get_song(result['id'])['song'])
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
# Skip results when URL is a 404 or lyrics are missing
if not lyrics:
if self.verbose:
print('Specified song does not have a valid URL with lyrics. Rejecting.')
return None
# Return a Song object with lyrics if we've made it this far
song = Song(song_info, lyrics)
if self.verbose:
print('Done.')
return song
def search_artist(self, artist_name, max_songs=None,
sort='popularity', per_page=20, get_full_info=True):
"""Search Genius.com for songs by the specified artist.
Returns an Artist object containing artist's songs.
:param artist_name: Name of the artist to search for
:param max_songs: Maximum number of songs to search for
:param sort: Sort by 'title' or 'popularity'
:param per_page: Number of results to return per search page
:param get_full_info: Get full info for each song (slower)
"""
if self.verbose:
print('Searching for songs by {0}...\n'.format(artist_name))
# Perform a Genius API search for the artist
found_artist = None
response = self.search_genius_web(artist_name)
found_artist = self._get_item_from_search_response(response, type_="artist")
# Exit the search if we couldn't find an artist by the given name
if not found_artist:
if self.verbose:
print("No results found for '{a}'.".format(a=artist_name))
return None
# Assume the top search result is the intended artist
artist_id = found_artist['id']
artist_info = self.get_artist(artist_id)
found_name = artist_info['artist']['name']
if found_name != artist_name:
if self.verbose:
print("Changing artist name to '{a}'".format(a=found_name))
artist_name = found_name
# Create the Artist object
artist = Artist(artist_info)
# Download each song by artist, stored as Song objects in Artist object
page = 1
reached_max_songs = False
while not reached_max_songs:
songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
# Loop through each song on page of search results
for song_info in songs_on_page['songs']:
# Check if song is valid (e.g. has title, contains lyrics)
has_title = ('title' in song_info)
has_lyrics = self._result_is_lyrics(song_info['title'])
valid = has_title and (has_lyrics or (not self.skip_non_songs))
# Reject non-song results (e.g. Linear Notes, Tracklists, etc.)
if not valid:
if self.verbose:
s = song_info['title'] if has_title else "MISSING TITLE"
print('"{s}" is not valid. Skipping.'.format(s=s))
continue
# Create the Song object from lyrics and metadata
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
if get_full_info:
info = self.get_song(song_info['id'])
else:
info = {'song': song_info}
song = Song(info, lyrics)
# Attempt to add the Song to the Artist
result = artist.add_song(song, verbose=False)
if result == 0 and self.verbose:
print('Song {n}: "{t}"'.format(n=artist.num_songs,
t=song.title))
# Exit search if the max number of songs has been met
reached_max_songs = max_songs and artist.num_songs >= max_songs
if reached_max_songs:
if self.verbose:
print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
break
# Move on to next page of search results
page = songs_on_page['next_page']
if page is None:
break # Exit search when last page is reached
if self.verbose:
print('Done. Found {n} songs.'.format(n=artist.num_songs))
return artist
def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
"""Save lyrics from multiple Artist objects as JSON object
:param artists: List of Artist objects to save lyrics from
:param filename: Name of output file (json)
:param overwrite: Overwrites preexisting file if True
"""
if isinstance(artists, Artist):
artists = [artists]
# Create a temporary directory for lyrics
start = time.time()
tmp_dir = 'tmp_lyrics'
if not os.path.isdir(tmp_dir):
os.mkdir(tmp_dir)
count = 0
else:
count = len(os.listdir(tmp_dir))
# Check if file already exists
if os.path.isfile(filename + ".json") and not overwrite:
msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
if input(msg).lower() != "y":
print("Leaving file in place. Exiting.")
os.rmdir(tmp_dir)
return
# Extract each artist's lyrics in json format
all_lyrics = {'artists': []}
for n, artist in enumerate(artists):
if isinstance(artist, Artist):
all_lyrics['artists'].append({})
f = "tmp_{n}_{a}".format(n=count + n,
a=artist.name.replace(" ", ""))
tmp_file = os.path.join(tmp_dir, f)
if self.verbose:
print(tmp_file)
all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)
# Save all of the lyrics
with open(filename + '.json', 'w') as outfile:
json.dump(all_lyrics, outfile)
# Delete the temporary directory
shutil.rmtree(tmp_dir)
elapsed = (time.time() - start) / 60 / 60
print("Time elapsed: {t} hours".format(t=elapsed))
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius._result_is_lyrics
|
python
|
def _result_is_lyrics(self, song_title):
default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
'booklet', 'credits', 'interview', 'skit',
'instrumental', 'setlist']
if self.excluded_terms:
if self.replace_default_terms:
default_terms = self.excluded_terms
else:
default_terms.extend(self.excluded_terms)
expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
regex = re.compile(expression, re.IGNORECASE)
return not regex.search(self._clean_str(song_title))
|
Returns False if result from Genius is not actually song lyrics
Set the `excluded_terms` and `replace_default_terms` as
instance variables within the Genius class.
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L161-L178
|
[
"def _clean_str(self, s):\n \"\"\" Returns a lowercase string with punctuation and bad chars removed\n :param s: string to clean\n \"\"\"\n return s.translate(str.maketrans('', '', punctuation)).replace('\\u200b', \" \").strip().lower()\n"
] |
class Genius(API):
"""User-level interface with the Genius.com API."""
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5,
verbose=True, remove_section_headers=False,
skip_non_songs=True, excluded_terms=[],
replace_default_terms=False):
""" Genius Client Constructor
:param verbose: Turn printed messages on or off (bool)
:param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
:param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
:param excluded_terms: (list) extra terms for flagging results as non-lyrics
:param replace_default_terms: if True, replaces default excluded terms with user's
"""
super().__init__(client_access_token, response_format, timeout, sleep_time)
self.verbose = verbose
self.remove_section_headers = remove_section_headers
self.skip_non_songs = skip_non_songs
self.excluded_terms = excluded_terms
self.replace_default_terms = replace_default_terms
def _scrape_song_lyrics_from_url(self, url):
""" Use BeautifulSoup to scrape song info off of a Genius song URL
:param url: URL for the web page to scrape lyrics from
"""
page = requests.get(url)
if page.status_code == 404:
return None
# Scrape the song lyrics from the HTML
html = BeautifulSoup(page.text, "html.parser")
div = html.find("div", class_="lyrics")
if not div:
return None # Sometimes the lyrics section isn't found
# Scrape lyrics if proper section was found on page
lyrics = div.get_text()
if self.remove_section_headers: # Remove [Verse], [Bridge], etc.
lyrics = re.sub('(\[.*?\])*', '', lyrics)
lyrics = re.sub('\n{2}', '\n', lyrics) # Gaps between verses
return lyrics.strip("\n")
def _clean_str(self, s):
""" Returns a lowercase string with punctuation and bad chars removed
:param s: string to clean
"""
return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()
def _get_item_from_search_response(self, response, type_):
""" Returns either a Song or Artist result from search_genius_web """
sections = sorted(response['sections'],
key=lambda sect: sect['type'] == type_,
reverse=True)
for section in sections:
hits = [hit for hit in section['hits'] if hit['type'] == type_]
if hits:
return hits[0]['result']
def _result_is_match(self, result, title, artist=None):
""" Returns True if search result matches searched song """
result_title = self._clean_str(result['title'])
title_is_match = result_title == self._clean_str(title)
if not artist:
return title_is_match
result_artist = self._clean_str(result['primary_artist']['name'])
return title_is_match and result_artist == self._clean_str(artist)
def search_song(self, title, artist="", get_full_info=True):
""" Search Genius.com for lyrics to a specific song
:param title: Song title to search for
:param artist: Name of the artist
:param get_full_info: Get full info for each song (slower)
"""
# Search the Genius API for the specified song
if self.verbose:
if artist:
print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
else:
print('Searching for "{s}"...'.format(s=title))
search_term = "{s} {a}".format(s=title, a=artist).strip()
response = self.search_genius_web(search_term)
# Otherwise, move forward with processing the search results
result = self._get_item_from_search_response(response, type_="song")
# Exit search if there were no results returned from API
if not result:
if self.verbose:
print("No results found for: '{s}'".format(s=search_term))
return None
# Reject non-songs (Liner notes, track lists, etc.)
valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
if not valid:
if self.verbose:
print('Specified song does not contain lyrics. Rejecting.')
return None
# Download full song info (an API call) unless told not to by user
song_info = result.copy()
if get_full_info:
song_info.update(self.get_song(result['id'])['song'])
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
# Skip results when URL is a 404 or lyrics are missing
if not lyrics:
if self.verbose:
print('Specified song does not have a valid URL with lyrics. Rejecting.')
return None
# Return a Song object with lyrics if we've made it this far
song = Song(song_info, lyrics)
if self.verbose:
print('Done.')
return song
def search_artist(self, artist_name, max_songs=None,
sort='popularity', per_page=20, get_full_info=True):
"""Search Genius.com for songs by the specified artist.
Returns an Artist object containing artist's songs.
:param artist_name: Name of the artist to search for
:param max_songs: Maximum number of songs to search for
:param sort: Sort by 'title' or 'popularity'
:param per_page: Number of results to return per search page
:param get_full_info: Get full info for each song (slower)
"""
if self.verbose:
print('Searching for songs by {0}...\n'.format(artist_name))
# Perform a Genius API search for the artist
found_artist = None
response = self.search_genius_web(artist_name)
found_artist = self._get_item_from_search_response(response, type_="artist")
# Exit the search if we couldn't find an artist by the given name
if not found_artist:
if self.verbose:
print("No results found for '{a}'.".format(a=artist_name))
return None
# Assume the top search result is the intended artist
artist_id = found_artist['id']
artist_info = self.get_artist(artist_id)
found_name = artist_info['artist']['name']
if found_name != artist_name:
if self.verbose:
print("Changing artist name to '{a}'".format(a=found_name))
artist_name = found_name
# Create the Artist object
artist = Artist(artist_info)
# Download each song by artist, stored as Song objects in Artist object
page = 1
reached_max_songs = False
while not reached_max_songs:
songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
# Loop through each song on page of search results
for song_info in songs_on_page['songs']:
# Check if song is valid (e.g. has title, contains lyrics)
has_title = ('title' in song_info)
has_lyrics = self._result_is_lyrics(song_info['title'])
valid = has_title and (has_lyrics or (not self.skip_non_songs))
# Reject non-song results (e.g. Linear Notes, Tracklists, etc.)
if not valid:
if self.verbose:
s = song_info['title'] if has_title else "MISSING TITLE"
print('"{s}" is not valid. Skipping.'.format(s=s))
continue
# Create the Song object from lyrics and metadata
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
if get_full_info:
info = self.get_song(song_info['id'])
else:
info = {'song': song_info}
song = Song(info, lyrics)
# Attempt to add the Song to the Artist
result = artist.add_song(song, verbose=False)
if result == 0 and self.verbose:
print('Song {n}: "{t}"'.format(n=artist.num_songs,
t=song.title))
# Exit search if the max number of songs has been met
reached_max_songs = max_songs and artist.num_songs >= max_songs
if reached_max_songs:
if self.verbose:
print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
break
# Move on to next page of search results
page = songs_on_page['next_page']
if page is None:
break # Exit search when last page is reached
if self.verbose:
print('Done. Found {n} songs.'.format(n=artist.num_songs))
return artist
def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
"""Save lyrics from multiple Artist objects as JSON object
:param artists: List of Artist objects to save lyrics from
:param filename: Name of output file (json)
:param overwrite: Overwrites preexisting file if True
"""
if isinstance(artists, Artist):
artists = [artists]
# Create a temporary directory for lyrics
start = time.time()
tmp_dir = 'tmp_lyrics'
if not os.path.isdir(tmp_dir):
os.mkdir(tmp_dir)
count = 0
else:
count = len(os.listdir(tmp_dir))
# Check if file already exists
if os.path.isfile(filename + ".json") and not overwrite:
msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
if input(msg).lower() != "y":
print("Leaving file in place. Exiting.")
os.rmdir(tmp_dir)
return
# Extract each artist's lyrics in json format
all_lyrics = {'artists': []}
for n, artist in enumerate(artists):
if isinstance(artist, Artist):
all_lyrics['artists'].append({})
f = "tmp_{n}_{a}".format(n=count + n,
a=artist.name.replace(" ", ""))
tmp_file = os.path.join(tmp_dir, f)
if self.verbose:
print(tmp_file)
all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)
# Save all of the lyrics
with open(filename + '.json', 'w') as outfile:
json.dump(all_lyrics, outfile)
# Delete the temporary directory
shutil.rmtree(tmp_dir)
elapsed = (time.time() - start) / 60 / 60
print("Time elapsed: {t} hours".format(t=elapsed))
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius._get_item_from_search_response
|
python
|
def _get_item_from_search_response(self, response, type_):
sections = sorted(response['sections'],
key=lambda sect: sect['type'] == type_,
reverse=True)
for section in sections:
hits = [hit for hit in section['hits'] if hit['type'] == type_]
if hits:
return hits[0]['result']
|
Returns either a Song or Artist result from search_genius_web
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L180-L188
| null |
class Genius(API):
"""User-level interface with the Genius.com API."""
def __init__(self, client_access_token,
response_format='plain', timeout=5, sleep_time=0.5,
verbose=True, remove_section_headers=False,
skip_non_songs=True, excluded_terms=[],
replace_default_terms=False):
""" Genius Client Constructor
:param verbose: Turn printed messages on or off (bool)
:param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
:param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
:param excluded_terms: (list) extra terms for flagging results as non-lyrics
:param replace_default_terms: if True, replaces default excluded terms with user's
"""
super().__init__(client_access_token, response_format, timeout, sleep_time)
self.verbose = verbose
self.remove_section_headers = remove_section_headers
self.skip_non_songs = skip_non_songs
self.excluded_terms = excluded_terms
self.replace_default_terms = replace_default_terms
def _scrape_song_lyrics_from_url(self, url):
""" Use BeautifulSoup to scrape song info off of a Genius song URL
:param url: URL for the web page to scrape lyrics from
"""
page = requests.get(url)
if page.status_code == 404:
return None
# Scrape the song lyrics from the HTML
html = BeautifulSoup(page.text, "html.parser")
div = html.find("div", class_="lyrics")
if not div:
return None # Sometimes the lyrics section isn't found
# Scrape lyrics if proper section was found on page
lyrics = div.get_text()
if self.remove_section_headers: # Remove [Verse], [Bridge], etc.
lyrics = re.sub('(\[.*?\])*', '', lyrics)
lyrics = re.sub('\n{2}', '\n', lyrics) # Gaps between verses
return lyrics.strip("\n")
def _clean_str(self, s):
""" Returns a lowercase string with punctuation and bad chars removed
:param s: string to clean
"""
return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()
def _result_is_lyrics(self, song_title):
""" Returns False if result from Genius is not actually song lyrics
Set the `excluded_terms` and `replace_default_terms` as
instance variables within the Genius class.
"""
default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
'booklet', 'credits', 'interview', 'skit',
'instrumental', 'setlist']
if self.excluded_terms:
if self.replace_default_terms:
default_terms = self.excluded_terms
else:
default_terms.extend(self.excluded_terms)
expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
regex = re.compile(expression, re.IGNORECASE)
return not regex.search(self._clean_str(song_title))
def _result_is_match(self, result, title, artist=None):
""" Returns True if search result matches searched song """
result_title = self._clean_str(result['title'])
title_is_match = result_title == self._clean_str(title)
if not artist:
return title_is_match
result_artist = self._clean_str(result['primary_artist']['name'])
return title_is_match and result_artist == self._clean_str(artist)
def search_song(self, title, artist="", get_full_info=True):
""" Search Genius.com for lyrics to a specific song
:param title: Song title to search for
:param artist: Name of the artist
:param get_full_info: Get full info for each song (slower)
"""
# Search the Genius API for the specified song
if self.verbose:
if artist:
print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
else:
print('Searching for "{s}"...'.format(s=title))
search_term = "{s} {a}".format(s=title, a=artist).strip()
response = self.search_genius_web(search_term)
# Otherwise, move forward with processing the search results
result = self._get_item_from_search_response(response, type_="song")
# Exit search if there were no results returned from API
if not result:
if self.verbose:
print("No results found for: '{s}'".format(s=search_term))
return None
# Reject non-songs (Liner notes, track lists, etc.)
valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
if not valid:
if self.verbose:
print('Specified song does not contain lyrics. Rejecting.')
return None
# Download full song info (an API call) unless told not to by user
song_info = result.copy()
if get_full_info:
song_info.update(self.get_song(result['id'])['song'])
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
# Skip results when URL is a 404 or lyrics are missing
if not lyrics:
if self.verbose:
print('Specified song does not have a valid URL with lyrics. Rejecting.')
return None
# Return a Song object with lyrics if we've made it this far
song = Song(song_info, lyrics)
if self.verbose:
print('Done.')
return song
def search_artist(self, artist_name, max_songs=None,
                  sort='popularity', per_page=20, get_full_info=True):
    """Search Genius.com for songs by the specified artist.

    Returns an Artist object containing artist's songs.

    :param artist_name: Name of the artist to search for
    :param max_songs: Maximum number of songs to search for
    :param sort: Sort by 'title' or 'popularity'
    :param per_page: Number of results to return per search page
    :param get_full_info: Get full info for each song (slower)
    :return: an Artist object, or None when the artist is not found
    """
    if self.verbose:
        print('Searching for songs by {0}...\n'.format(artist_name))

    # Resolve the artist via the web search endpoint.
    response = self.search_genius_web(artist_name)
    found_artist = self._get_item_from_search_response(response, type_="artist")
    if not found_artist:
        if self.verbose:
            print("No results found for '{a}'.".format(a=artist_name))
        return None

    # Assume the top search result is the intended artist.
    artist_id = found_artist['id']
    artist_info = self.get_artist(artist_id)
    found_name = artist_info['artist']['name']
    if found_name != artist_name:
        if self.verbose:
            print("Changing artist name to '{a}'".format(a=found_name))
        artist_name = found_name

    artist = Artist(artist_info)

    # Walk the paginated song listing until exhausted or capped.
    page = 1
    reached_max_songs = False
    while not reached_max_songs:
        songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
        for song_info in songs_on_page['songs']:
            has_title = ('title' in song_info)
            # Fix: short-circuit on has_title — the original indexed
            # song_info['title'] unconditionally, raising KeyError for
            # results without a title instead of reaching the
            # "MISSING TITLE" skip branch below.
            has_lyrics = has_title and self._result_is_lyrics(song_info['title'])
            valid = has_title and (has_lyrics or (not self.skip_non_songs))
            if not valid:
                if self.verbose:
                    s = song_info['title'] if has_title else "MISSING TITLE"
                    print('"{s}" is not valid. Skipping.'.format(s=s))
                continue

            # Build the Song from scraped lyrics plus (optionally) full metadata.
            lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
            if get_full_info:
                info = self.get_song(song_info['id'])
            else:
                info = {'song': song_info}
            song = Song(info, lyrics)

            # add_song returns 0 on success (e.g. not a duplicate).
            result = artist.add_song(song, verbose=False)
            if result == 0 and self.verbose:
                print('Song {n}: "{t}"'.format(n=artist.num_songs,
                                               t=song.title))

            reached_max_songs = max_songs and artist.num_songs >= max_songs
            if reached_max_songs:
                if self.verbose:
                    print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
                break

        page = songs_on_page['next_page']
        if page is None:
            break  # Exit search when last page is reached

    if self.verbose:
        print('Done. Found {n} songs.'.format(n=artist.num_songs))
    return artist
def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
    """Save lyrics from multiple Artist objects as a single JSON object.

    :param artists: List of Artist objects (or a single Artist)
    :param filename: Name of output file (json), without extension
    :param overwrite: Overwrites preexisting file if True
    """
    if isinstance(artists, Artist):
        artists = [artists]

    # Create (or reuse) a temporary directory for lyrics.
    start = time.time()
    tmp_dir = 'tmp_lyrics'
    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)
        count = 0
    else:
        count = len(os.listdir(tmp_dir))

    # Ask before clobbering an existing output file.
    if os.path.isfile(filename + ".json") and not overwrite:
        msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
        if input(msg).lower() != "y":
            print("Leaving file in place. Exiting.")
            # Fix: os.rmdir raises OSError when the (pre-existing) tmp
            # dir is non-empty; leave such a directory untouched.
            try:
                os.rmdir(tmp_dir)
            except OSError:
                pass
            return

    # Extract each artist's lyrics in json format.
    all_lyrics = {'artists': []}
    for n, artist in enumerate(artists):
        if isinstance(artist, Artist):
            all_lyrics['artists'].append({})
            f = "tmp_{n}_{a}".format(n=count + n,
                                     a=artist.name.replace(" ", ""))
            tmp_file = os.path.join(tmp_dir, f)
            if self.verbose:
                print(tmp_file)
            # NOTE(review): tmp_file is printed but never written to;
            # lyrics go straight into all_lyrics — confirm intent.
            all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)

    # Save all of the lyrics in one JSON document.
    with open(filename + '.json', 'w') as outfile:
        json.dump(all_lyrics, outfile)

    # Delete the temporary directory and report elapsed time.
    shutil.rmtree(tmp_dir)
    elapsed = (time.time() - start) / 60 / 60
    print("Time elapsed: {t} hours".format(t=elapsed))
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius._result_is_match
|
python
|
def _result_is_match(self, result, title, artist=None):
result_title = self._clean_str(result['title'])
title_is_match = result_title == self._clean_str(title)
if not artist:
return title_is_match
result_artist = self._clean_str(result['primary_artist']['name'])
return title_is_match and result_artist == self._clean_str(artist)
|
Returns True if search result matches searched song
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L190-L197
| null |
class Genius(API):
    """User-level interface with the Genius.com API.

    Adds song/artist search, lyrics scraping and bulk-save helpers on
    top of the raw :class:`API` client.
    """

    def __init__(self, client_access_token,
                 response_format='plain', timeout=5, sleep_time=0.5,
                 verbose=True, remove_section_headers=False,
                 skip_non_songs=True, excluded_terms=None,
                 replace_default_terms=False):
        """Genius Client Constructor

        :param client_access_token: Genius API access token
        :param response_format: Text format returned by the API ('plain')
        :param timeout: Request timeout in seconds
        :param sleep_time: Delay between API calls in seconds
        :param verbose: Turn printed messages on or off (bool)
        :param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
        :param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
        :param excluded_terms: (list) extra terms for flagging results as non-lyrics
        :param replace_default_terms: if True, replaces default excluded terms with user's
        """
        super().__init__(client_access_token, response_format, timeout, sleep_time)
        self.verbose = verbose
        self.remove_section_headers = remove_section_headers
        self.skip_non_songs = skip_non_songs
        # Fix: the previous default `excluded_terms=[]` was a shared mutable
        # default argument and also aliased the caller's list; copy instead.
        self.excluded_terms = list(excluded_terms) if excluded_terms else []
        self.replace_default_terms = replace_default_terms

    def _scrape_song_lyrics_from_url(self, url):
        """Use BeautifulSoup to scrape song lyrics off of a Genius song URL.

        :param url: URL for the web page to scrape lyrics from
        :return: lyrics string, or None on a 404 / missing lyrics section
        """
        page = requests.get(url)
        if page.status_code == 404:
            return None

        # Scrape the song lyrics from the HTML.
        html = BeautifulSoup(page.text, "html.parser")
        div = html.find("div", class_="lyrics")
        if not div:
            return None  # Sometimes the lyrics section isn't found

        lyrics = div.get_text()
        if self.remove_section_headers:
            # Fix: raw strings — '\[' in a plain string is an invalid escape
            # sequence (DeprecationWarning, an error in future Pythons).
            lyrics = re.sub(r'\[.*?\]', '', lyrics)  # [Verse], [Bridge], ...
            lyrics = re.sub(r'\n{2}', '\n', lyrics)  # Gaps between verses
        return lyrics.strip("\n")

    def _clean_str(self, s):
        """Return a lowercase copy of *s* with punctuation and zero-width spaces removed."""
        return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()

    def _result_is_lyrics(self, song_title):
        """Return False if a result from Genius is not actually song lyrics.

        The `excluded_terms` and `replace_default_terms` instance
        attributes extend or replace the default non-lyrics terms.
        """
        default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
                         'booklet', 'credits', 'interview', 'skit',
                         'instrumental', 'setlist']
        if self.excluded_terms:
            if self.replace_default_terms:
                default_terms = self.excluded_terms
            else:
                default_terms = default_terms + list(self.excluded_terms)

        # Build "(a)|(b)|..." and match case-insensitively against the title.
        expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
        regex = re.compile(expression, re.IGNORECASE)
        return not regex.search(self._clean_str(song_title))

    def _get_item_from_search_response(self, response, type_):
        """Return the first Song or Artist hit of the wanted type, or None."""
        # Visit sections of the wanted type first.
        sections = sorted(response['sections'],
                          key=lambda sect: sect['type'] == type_,
                          reverse=True)
        for section in sections:
            hits = [hit for hit in section['hits'] if hit['type'] == type_]
            if hits:
                return hits[0]['result']
        return None

    def search_song(self, title, artist="", get_full_info=True):
        """Search Genius.com for lyrics to a specific song.

        :param title: Song title to search for
        :param artist: Name of the artist
        :param get_full_info: Get full info for each song (slower)
        :return: a Song object, or None when no usable result is found
        """
        if self.verbose:
            if artist:
                print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
            else:
                print('Searching for "{s}"...'.format(s=title))
        search_term = "{s} {a}".format(s=title, a=artist).strip()
        response = self.search_genius_web(search_term)

        result = self._get_item_from_search_response(response, type_="song")
        # Exit search if there were no results returned from API.
        if not result:
            if self.verbose:
                print("No results found for: '{s}'".format(s=search_term))
            return None

        # Reject non-songs (liner notes, track lists, etc.).
        valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
        if not valid:
            if self.verbose:
                print('Specified song does not contain lyrics. Rejecting.')
            return None

        # Download full song info (an API call) unless told not to by user.
        song_info = result.copy()
        if get_full_info:
            song_info.update(self.get_song(result['id'])['song'])
        lyrics = self._scrape_song_lyrics_from_url(song_info['url'])

        # Skip results when URL is a 404 or lyrics are missing.
        if not lyrics:
            if self.verbose:
                print('Specified song does not have a valid URL with lyrics. Rejecting.')
            return None

        song = Song(song_info, lyrics)
        if self.verbose:
            print('Done.')
        return song

    def search_artist(self, artist_name, max_songs=None,
                      sort='popularity', per_page=20, get_full_info=True):
        """Search Genius.com for songs by the specified artist.

        Returns an Artist object containing artist's songs.

        :param artist_name: Name of the artist to search for
        :param max_songs: Maximum number of songs to search for
        :param sort: Sort by 'title' or 'popularity'
        :param per_page: Number of results to return per search page
        :param get_full_info: Get full info for each song (slower)
        """
        if self.verbose:
            print('Searching for songs by {0}...\n'.format(artist_name))

        response = self.search_genius_web(artist_name)
        found_artist = self._get_item_from_search_response(response, type_="artist")
        if not found_artist:
            if self.verbose:
                print("No results found for '{a}'.".format(a=artist_name))
            return None

        # Assume the top search result is the intended artist.
        artist_id = found_artist['id']
        artist_info = self.get_artist(artist_id)
        found_name = artist_info['artist']['name']
        if found_name != artist_name:
            if self.verbose:
                print("Changing artist name to '{a}'".format(a=found_name))
            artist_name = found_name

        artist = Artist(artist_info)

        # Walk the paginated song listing until exhausted or capped.
        page = 1
        reached_max_songs = False
        while not reached_max_songs:
            songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
            for song_info in songs_on_page['songs']:
                has_title = ('title' in song_info)
                # Fix: short-circuit so a result without a 'title' key no
                # longer raises KeyError before the skip branch below.
                has_lyrics = has_title and self._result_is_lyrics(song_info['title'])
                valid = has_title and (has_lyrics or (not self.skip_non_songs))
                if not valid:
                    if self.verbose:
                        s = song_info['title'] if has_title else "MISSING TITLE"
                        print('"{s}" is not valid. Skipping.'.format(s=s))
                    continue

                # Create the Song object from lyrics and metadata.
                lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
                if get_full_info:
                    info = self.get_song(song_info['id'])
                else:
                    info = {'song': song_info}
                song = Song(info, lyrics)

                # add_song returns 0 on success (e.g. not a duplicate).
                result = artist.add_song(song, verbose=False)
                if result == 0 and self.verbose:
                    print('Song {n}: "{t}"'.format(n=artist.num_songs,
                                                   t=song.title))

                reached_max_songs = max_songs and artist.num_songs >= max_songs
                if reached_max_songs:
                    if self.verbose:
                        print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
                    break

            page = songs_on_page['next_page']
            if page is None:
                break  # Exit search when last page is reached

        if self.verbose:
            print('Done. Found {n} songs.'.format(n=artist.num_songs))
        return artist

    def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
        """Save lyrics from multiple Artist objects as a single JSON object.

        :param artists: List of Artist objects (or a single Artist)
        :param filename: Name of output file (json), without extension
        :param overwrite: Overwrites preexisting file if True
        """
        if isinstance(artists, Artist):
            artists = [artists]

        # Create (or reuse) a temporary directory for lyrics.
        start = time.time()
        tmp_dir = 'tmp_lyrics'
        if not os.path.isdir(tmp_dir):
            os.mkdir(tmp_dir)
            count = 0
        else:
            count = len(os.listdir(tmp_dir))

        # Ask before clobbering an existing output file.
        if os.path.isfile(filename + ".json") and not overwrite:
            msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
            if input(msg).lower() != "y":
                print("Leaving file in place. Exiting.")
                # Fix: os.rmdir raises OSError on a pre-existing, non-empty
                # tmp dir; leave such a directory untouched.
                try:
                    os.rmdir(tmp_dir)
                except OSError:
                    pass
                return

        # Extract each artist's lyrics in json format.
        all_lyrics = {'artists': []}
        for n, artist in enumerate(artists):
            if isinstance(artist, Artist):
                all_lyrics['artists'].append({})
                f = "tmp_{n}_{a}".format(n=count + n,
                                         a=artist.name.replace(" ", ""))
                tmp_file = os.path.join(tmp_dir, f)
                if self.verbose:
                    print(tmp_file)
                # NOTE(review): tmp_file is printed but never written to;
                # lyrics go straight into all_lyrics — confirm intent.
                all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)

        # Save all of the lyrics in one JSON document.
        with open(filename + '.json', 'w') as outfile:
            json.dump(all_lyrics, outfile)

        # Delete the temporary directory and report elapsed time.
        shutil.rmtree(tmp_dir)
        elapsed = (time.time() - start) / 60 / 60
        print("Time elapsed: {t} hours".format(t=elapsed))
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius.search_song
|
python
|
def search_song(self, title, artist="", get_full_info=True):
# Search the Genius API for the specified song
if self.verbose:
if artist:
print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
else:
print('Searching for "{s}"...'.format(s=title))
search_term = "{s} {a}".format(s=title, a=artist).strip()
response = self.search_genius_web(search_term)
# Otherwise, move forward with processing the search results
result = self._get_item_from_search_response(response, type_="song")
# Exit search if there were no results returned from API
if not result:
if self.verbose:
print("No results found for: '{s}'".format(s=search_term))
return None
# Reject non-songs (Liner notes, track lists, etc.)
valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
if not valid:
if self.verbose:
print('Specified song does not contain lyrics. Rejecting.')
return None
# Download full song info (an API call) unless told not to by user
song_info = result.copy()
if get_full_info:
song_info.update(self.get_song(result['id'])['song'])
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
# Skip results when URL is a 404 or lyrics are missing
if not lyrics:
if self.verbose:
print('Specified song does not have a valid URL with lyrics. Rejecting.')
return None
# Return a Song object with lyrics if we've made it this far
song = Song(song_info, lyrics)
if self.verbose:
print('Done.')
return song
|
Search Genius.com for lyrics to a specific song
:param title: Song title to search for
:param artist: Name of the artist
:param get_full_info: Get full info for each song (slower)
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L199-L247
|
[
"def get_song(self, id_):\n \"\"\"Data for a specific song.\"\"\"\n endpoint = \"songs/{id}\".format(id=id_)\n return self._make_request(endpoint)\n",
"def search_genius_web(self, search_term, per_page=5):\n \"\"\"Use the web-version of Genius search\"\"\"\n endpoint = \"search/multi?\"\n params = {'per_page': per_page, 'q': search_term}\n\n # This endpoint is not part of the API, requires different formatting\n url = \"https://genius.com/api/\" + endpoint + urlencode(params)\n response = requests.get(url, timeout=self.timeout)\n time.sleep(max(self._SLEEP_MIN, self.sleep_time))\n return response.json()['response'] if response else None\n",
"def _scrape_song_lyrics_from_url(self, url):\n \"\"\" Use BeautifulSoup to scrape song info off of a Genius song URL\n :param url: URL for the web page to scrape lyrics from\n \"\"\"\n page = requests.get(url)\n if page.status_code == 404:\n return None\n\n # Scrape the song lyrics from the HTML\n html = BeautifulSoup(page.text, \"html.parser\")\n div = html.find(\"div\", class_=\"lyrics\")\n if not div:\n return None # Sometimes the lyrics section isn't found\n\n # Scrape lyrics if proper section was found on page\n lyrics = div.get_text()\n if self.remove_section_headers: # Remove [Verse], [Bridge], etc.\n lyrics = re.sub('(\\[.*?\\])*', '', lyrics)\n lyrics = re.sub('\\n{2}', '\\n', lyrics) # Gaps between verses\n return lyrics.strip(\"\\n\")\n",
"def _result_is_lyrics(self, song_title):\n \"\"\" Returns False if result from Genius is not actually song lyrics\n Set the `excluded_terms` and `replace_default_terms` as\n instance variables within the Genius class.\n \"\"\"\n\n default_terms = ['track\\\\s?list', 'album art(work)?', 'liner notes',\n 'booklet', 'credits', 'interview', 'skit',\n 'instrumental', 'setlist']\n if self.excluded_terms:\n if self.replace_default_terms:\n default_terms = self.excluded_terms\n else:\n default_terms.extend(self.excluded_terms)\n\n expression = r\"\".join([\"({})|\".format(term) for term in default_terms]).strip('|')\n regex = re.compile(expression, re.IGNORECASE)\n return not regex.search(self._clean_str(song_title))\n",
"def _get_item_from_search_response(self, response, type_):\n \"\"\" Returns either a Song or Artist result from search_genius_web \"\"\"\n sections = sorted(response['sections'],\n key=lambda sect: sect['type'] == type_,\n reverse=True)\n for section in sections:\n hits = [hit for hit in section['hits'] if hit['type'] == type_]\n if hits:\n return hits[0]['result']\n"
] |
class Genius(API):
    """User-level interface with the Genius.com API.

    Adds artist search, lyrics scraping and bulk-save helpers on top of
    the raw :class:`API` client.
    """

    def __init__(self, client_access_token,
                 response_format='plain', timeout=5, sleep_time=0.5,
                 verbose=True, remove_section_headers=False,
                 skip_non_songs=True, excluded_terms=None,
                 replace_default_terms=False):
        """Genius Client Constructor

        :param client_access_token: Genius API access token
        :param response_format: Text format returned by the API ('plain')
        :param timeout: Request timeout in seconds
        :param sleep_time: Delay between API calls in seconds
        :param verbose: Turn printed messages on or off (bool)
        :param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
        :param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
        :param excluded_terms: (list) extra terms for flagging results as non-lyrics
        :param replace_default_terms: if True, replaces default excluded terms with user's
        """
        super().__init__(client_access_token, response_format, timeout, sleep_time)
        self.verbose = verbose
        self.remove_section_headers = remove_section_headers
        self.skip_non_songs = skip_non_songs
        # Fix: the previous default `excluded_terms=[]` was a shared mutable
        # default argument and also aliased the caller's list; copy instead.
        self.excluded_terms = list(excluded_terms) if excluded_terms else []
        self.replace_default_terms = replace_default_terms

    def _scrape_song_lyrics_from_url(self, url):
        """Use BeautifulSoup to scrape song lyrics off of a Genius song URL.

        :param url: URL for the web page to scrape lyrics from
        :return: lyrics string, or None on a 404 / missing lyrics section
        """
        page = requests.get(url)
        if page.status_code == 404:
            return None

        # Scrape the song lyrics from the HTML.
        html = BeautifulSoup(page.text, "html.parser")
        div = html.find("div", class_="lyrics")
        if not div:
            return None  # Sometimes the lyrics section isn't found

        lyrics = div.get_text()
        if self.remove_section_headers:
            # Fix: raw strings — '\[' in a plain string is an invalid escape
            # sequence (DeprecationWarning, an error in future Pythons).
            lyrics = re.sub(r'\[.*?\]', '', lyrics)  # [Verse], [Bridge], ...
            lyrics = re.sub(r'\n{2}', '\n', lyrics)  # Gaps between verses
        return lyrics.strip("\n")

    def _clean_str(self, s):
        """Return a lowercase copy of *s* with punctuation and zero-width spaces removed."""
        return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()

    def _result_is_lyrics(self, song_title):
        """Return False if a result from Genius is not actually song lyrics.

        The `excluded_terms` and `replace_default_terms` instance
        attributes extend or replace the default non-lyrics terms.
        """
        default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
                         'booklet', 'credits', 'interview', 'skit',
                         'instrumental', 'setlist']
        if self.excluded_terms:
            if self.replace_default_terms:
                default_terms = self.excluded_terms
            else:
                default_terms = default_terms + list(self.excluded_terms)

        # Build "(a)|(b)|..." and match case-insensitively against the title.
        expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
        regex = re.compile(expression, re.IGNORECASE)
        return not regex.search(self._clean_str(song_title))

    def _get_item_from_search_response(self, response, type_):
        """Return the first Song or Artist hit of the wanted type, or None."""
        # Visit sections of the wanted type first.
        sections = sorted(response['sections'],
                          key=lambda sect: sect['type'] == type_,
                          reverse=True)
        for section in sections:
            hits = [hit for hit in section['hits'] if hit['type'] == type_]
            if hits:
                return hits[0]['result']
        return None

    def _result_is_match(self, result, title, artist=None):
        """Return True if search result matches the searched song (title, and artist if given)."""
        result_title = self._clean_str(result['title'])
        title_is_match = result_title == self._clean_str(title)
        if not artist:
            return title_is_match
        result_artist = self._clean_str(result['primary_artist']['name'])
        return title_is_match and result_artist == self._clean_str(artist)

    def search_artist(self, artist_name, max_songs=None,
                      sort='popularity', per_page=20, get_full_info=True):
        """Search Genius.com for songs by the specified artist.

        Returns an Artist object containing artist's songs.

        :param artist_name: Name of the artist to search for
        :param max_songs: Maximum number of songs to search for
        :param sort: Sort by 'title' or 'popularity'
        :param per_page: Number of results to return per search page
        :param get_full_info: Get full info for each song (slower)
        """
        if self.verbose:
            print('Searching for songs by {0}...\n'.format(artist_name))

        response = self.search_genius_web(artist_name)
        found_artist = self._get_item_from_search_response(response, type_="artist")
        if not found_artist:
            if self.verbose:
                print("No results found for '{a}'.".format(a=artist_name))
            return None

        # Assume the top search result is the intended artist.
        artist_id = found_artist['id']
        artist_info = self.get_artist(artist_id)
        found_name = artist_info['artist']['name']
        if found_name != artist_name:
            if self.verbose:
                print("Changing artist name to '{a}'".format(a=found_name))
            artist_name = found_name

        artist = Artist(artist_info)

        # Walk the paginated song listing until exhausted or capped.
        page = 1
        reached_max_songs = False
        while not reached_max_songs:
            songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
            for song_info in songs_on_page['songs']:
                has_title = ('title' in song_info)
                # Fix: short-circuit so a result without a 'title' key no
                # longer raises KeyError before the skip branch below.
                has_lyrics = has_title and self._result_is_lyrics(song_info['title'])
                valid = has_title and (has_lyrics or (not self.skip_non_songs))
                if not valid:
                    if self.verbose:
                        s = song_info['title'] if has_title else "MISSING TITLE"
                        print('"{s}" is not valid. Skipping.'.format(s=s))
                    continue

                # Create the Song object from lyrics and metadata.
                lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
                if get_full_info:
                    info = self.get_song(song_info['id'])
                else:
                    info = {'song': song_info}
                song = Song(info, lyrics)

                # add_song returns 0 on success (e.g. not a duplicate).
                result = artist.add_song(song, verbose=False)
                if result == 0 and self.verbose:
                    print('Song {n}: "{t}"'.format(n=artist.num_songs,
                                                   t=song.title))

                reached_max_songs = max_songs and artist.num_songs >= max_songs
                if reached_max_songs:
                    if self.verbose:
                        print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
                    break

            page = songs_on_page['next_page']
            if page is None:
                break  # Exit search when last page is reached

        if self.verbose:
            print('Done. Found {n} songs.'.format(n=artist.num_songs))
        return artist

    def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
        """Save lyrics from multiple Artist objects as a single JSON object.

        :param artists: List of Artist objects (or a single Artist)
        :param filename: Name of output file (json), without extension
        :param overwrite: Overwrites preexisting file if True
        """
        if isinstance(artists, Artist):
            artists = [artists]

        # Create (or reuse) a temporary directory for lyrics.
        start = time.time()
        tmp_dir = 'tmp_lyrics'
        if not os.path.isdir(tmp_dir):
            os.mkdir(tmp_dir)
            count = 0
        else:
            count = len(os.listdir(tmp_dir))

        # Ask before clobbering an existing output file.
        if os.path.isfile(filename + ".json") and not overwrite:
            msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
            if input(msg).lower() != "y":
                print("Leaving file in place. Exiting.")
                # Fix: os.rmdir raises OSError on a pre-existing, non-empty
                # tmp dir; leave such a directory untouched.
                try:
                    os.rmdir(tmp_dir)
                except OSError:
                    pass
                return

        # Extract each artist's lyrics in json format.
        all_lyrics = {'artists': []}
        for n, artist in enumerate(artists):
            if isinstance(artist, Artist):
                all_lyrics['artists'].append({})
                f = "tmp_{n}_{a}".format(n=count + n,
                                         a=artist.name.replace(" ", ""))
                tmp_file = os.path.join(tmp_dir, f)
                if self.verbose:
                    print(tmp_file)
                # NOTE(review): tmp_file is printed but never written to;
                # lyrics go straight into all_lyrics — confirm intent.
                all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)

        # Save all of the lyrics in one JSON document.
        with open(filename + '.json', 'w') as outfile:
            json.dump(all_lyrics, outfile)

        # Delete the temporary directory and report elapsed time.
        shutil.rmtree(tmp_dir)
        elapsed = (time.time() - start) / 60 / 60
        print("Time elapsed: {t} hours".format(t=elapsed))
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius.search_artist
|
python
|
def search_artist(self, artist_name, max_songs=None,
sort='popularity', per_page=20, get_full_info=True):
if self.verbose:
print('Searching for songs by {0}...\n'.format(artist_name))
# Perform a Genius API search for the artist
found_artist = None
response = self.search_genius_web(artist_name)
found_artist = self._get_item_from_search_response(response, type_="artist")
# Exit the search if we couldn't find an artist by the given name
if not found_artist:
if self.verbose:
print("No results found for '{a}'.".format(a=artist_name))
return None
# Assume the top search result is the intended artist
artist_id = found_artist['id']
artist_info = self.get_artist(artist_id)
found_name = artist_info['artist']['name']
if found_name != artist_name:
if self.verbose:
print("Changing artist name to '{a}'".format(a=found_name))
artist_name = found_name
# Create the Artist object
artist = Artist(artist_info)
# Download each song by artist, stored as Song objects in Artist object
page = 1
reached_max_songs = False
while not reached_max_songs:
songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
# Loop through each song on page of search results
for song_info in songs_on_page['songs']:
# Check if song is valid (e.g. has title, contains lyrics)
has_title = ('title' in song_info)
has_lyrics = self._result_is_lyrics(song_info['title'])
valid = has_title and (has_lyrics or (not self.skip_non_songs))
# Reject non-song results (e.g. Linear Notes, Tracklists, etc.)
if not valid:
if self.verbose:
s = song_info['title'] if has_title else "MISSING TITLE"
print('"{s}" is not valid. Skipping.'.format(s=s))
continue
# Create the Song object from lyrics and metadata
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
if get_full_info:
info = self.get_song(song_info['id'])
else:
info = {'song': song_info}
song = Song(info, lyrics)
# Attempt to add the Song to the Artist
result = artist.add_song(song, verbose=False)
if result == 0 and self.verbose:
print('Song {n}: "{t}"'.format(n=artist.num_songs,
t=song.title))
# Exit search if the max number of songs has been met
reached_max_songs = max_songs and artist.num_songs >= max_songs
if reached_max_songs:
if self.verbose:
print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
break
# Move on to next page of search results
page = songs_on_page['next_page']
if page is None:
break # Exit search when last page is reached
if self.verbose:
print('Done. Found {n} songs.'.format(n=artist.num_songs))
return artist
|
Search Genius.com for songs by the specified artist.
Returns an Artist object containing artist's songs.
:param artist_name: Name of the artist to search for
:param max_songs: Maximum number of songs to search for
:param sort: Sort by 'title' or 'popularity'
:param per_page: Number of results to return per search page
:param get_full_info: Get full info for each song (slower)
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L249-L334
|
[
"def get_artist(self, id_):\n \"\"\"Data for a specific artist.\"\"\"\n endpoint = \"artists/{id}\".format(id=id_)\n return self._make_request(endpoint)\n",
"def get_artist_songs(self, id_, sort='title', per_page=20, page=1):\n \"\"\"Documents (songs) for the artist specified.\"\"\"\n endpoint = \"artists/{id}/songs\".format(id=id_)\n params = {'sort': sort, 'per_page': per_page, 'page': page}\n return self._make_request(endpoint, params_=params)\n",
"def search_genius_web(self, search_term, per_page=5):\n \"\"\"Use the web-version of Genius search\"\"\"\n endpoint = \"search/multi?\"\n params = {'per_page': per_page, 'q': search_term}\n\n # This endpoint is not part of the API, requires different formatting\n url = \"https://genius.com/api/\" + endpoint + urlencode(params)\n response = requests.get(url, timeout=self.timeout)\n time.sleep(max(self._SLEEP_MIN, self.sleep_time))\n return response.json()['response'] if response else None\n",
"def _result_is_lyrics(self, song_title):\n \"\"\" Returns False if result from Genius is not actually song lyrics\n Set the `excluded_terms` and `replace_default_terms` as\n instance variables within the Genius class.\n \"\"\"\n\n default_terms = ['track\\\\s?list', 'album art(work)?', 'liner notes',\n 'booklet', 'credits', 'interview', 'skit',\n 'instrumental', 'setlist']\n if self.excluded_terms:\n if self.replace_default_terms:\n default_terms = self.excluded_terms\n else:\n default_terms.extend(self.excluded_terms)\n\n expression = r\"\".join([\"({})|\".format(term) for term in default_terms]).strip('|')\n regex = re.compile(expression, re.IGNORECASE)\n return not regex.search(self._clean_str(song_title))\n",
"def _get_item_from_search_response(self, response, type_):\n \"\"\" Returns either a Song or Artist result from search_genius_web \"\"\"\n sections = sorted(response['sections'],\n key=lambda sect: sect['type'] == type_,\n reverse=True)\n for section in sections:\n hits = [hit for hit in section['hits'] if hit['type'] == type_]\n if hits:\n return hits[0]['result']\n"
] |
class Genius(API):
"""User-level interface with the Genius.com API."""
def __init__(self, client_access_token,
             response_format='plain', timeout=5, sleep_time=0.5,
             verbose=True, remove_section_headers=False,
             skip_non_songs=True, excluded_terms=None,
             replace_default_terms=False):
    """Genius Client Constructor

    :param client_access_token: Genius API access token
    :param response_format: Text format returned by the API ('plain')
    :param timeout: Request timeout in seconds
    :param sleep_time: Delay between API calls in seconds
    :param verbose: Turn printed messages on or off (bool)
    :param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
    :param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
    :param excluded_terms: (list) extra terms for flagging results as non-lyrics
    :param replace_default_terms: if True, replaces default excluded terms with user's
    """
    super().__init__(client_access_token, response_format, timeout, sleep_time)
    self.verbose = verbose
    self.remove_section_headers = remove_section_headers
    self.skip_non_songs = skip_non_songs
    # Fix: the previous default `excluded_terms=[]` was a shared mutable
    # default argument and also aliased the caller's list into instance
    # state; default to None and copy into a fresh list instead.
    self.excluded_terms = list(excluded_terms) if excluded_terms else []
    self.replace_default_terms = replace_default_terms
def _scrape_song_lyrics_from_url(self, url):
""" Use BeautifulSoup to scrape song info off of a Genius song URL
:param url: URL for the web page to scrape lyrics from
"""
page = requests.get(url)
if page.status_code == 404:
return None
# Scrape the song lyrics from the HTML
html = BeautifulSoup(page.text, "html.parser")
div = html.find("div", class_="lyrics")
if not div:
return None # Sometimes the lyrics section isn't found
# Scrape lyrics if proper section was found on page
lyrics = div.get_text()
if self.remove_section_headers: # Remove [Verse], [Bridge], etc.
lyrics = re.sub('(\[.*?\])*', '', lyrics)
lyrics = re.sub('\n{2}', '\n', lyrics) # Gaps between verses
return lyrics.strip("\n")
def _clean_str(self, s):
""" Returns a lowercase string with punctuation and bad chars removed
:param s: string to clean
"""
return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()
def _result_is_lyrics(self, song_title):
""" Returns False if result from Genius is not actually song lyrics
Set the `excluded_terms` and `replace_default_terms` as
instance variables within the Genius class.
"""
default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
'booklet', 'credits', 'interview', 'skit',
'instrumental', 'setlist']
if self.excluded_terms:
if self.replace_default_terms:
default_terms = self.excluded_terms
else:
default_terms.extend(self.excluded_terms)
expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
regex = re.compile(expression, re.IGNORECASE)
return not regex.search(self._clean_str(song_title))
def _get_item_from_search_response(self, response, type_):
""" Returns either a Song or Artist result from search_genius_web """
sections = sorted(response['sections'],
key=lambda sect: sect['type'] == type_,
reverse=True)
for section in sections:
hits = [hit for hit in section['hits'] if hit['type'] == type_]
if hits:
return hits[0]['result']
def _result_is_match(self, result, title, artist=None):
""" Returns True if search result matches searched song """
result_title = self._clean_str(result['title'])
title_is_match = result_title == self._clean_str(title)
if not artist:
return title_is_match
result_artist = self._clean_str(result['primary_artist']['name'])
return title_is_match and result_artist == self._clean_str(artist)
def search_song(self, title, artist="", get_full_info=True):
""" Search Genius.com for lyrics to a specific song
:param title: Song title to search for
:param artist: Name of the artist
:param get_full_info: Get full info for each song (slower)
"""
# Search the Genius API for the specified song
if self.verbose:
if artist:
print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
else:
print('Searching for "{s}"...'.format(s=title))
search_term = "{s} {a}".format(s=title, a=artist).strip()
response = self.search_genius_web(search_term)
# Otherwise, move forward with processing the search results
result = self._get_item_from_search_response(response, type_="song")
# Exit search if there were no results returned from API
if not result:
if self.verbose:
print("No results found for: '{s}'".format(s=search_term))
return None
# Reject non-songs (Liner notes, track lists, etc.)
valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
if not valid:
if self.verbose:
print('Specified song does not contain lyrics. Rejecting.')
return None
# Download full song info (an API call) unless told not to by user
song_info = result.copy()
if get_full_info:
song_info.update(self.get_song(result['id'])['song'])
lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
# Skip results when URL is a 404 or lyrics are missing
if not lyrics:
if self.verbose:
print('Specified song does not have a valid URL with lyrics. Rejecting.')
return None
# Return a Song object with lyrics if we've made it this far
song = Song(song_info, lyrics)
if self.verbose:
print('Done.')
return song
def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
"""Save lyrics from multiple Artist objects as JSON object
:param artists: List of Artist objects to save lyrics from
:param filename: Name of output file (json)
:param overwrite: Overwrites preexisting file if True
"""
if isinstance(artists, Artist):
artists = [artists]
# Create a temporary directory for lyrics
start = time.time()
tmp_dir = 'tmp_lyrics'
if not os.path.isdir(tmp_dir):
os.mkdir(tmp_dir)
count = 0
else:
count = len(os.listdir(tmp_dir))
# Check if file already exists
if os.path.isfile(filename + ".json") and not overwrite:
msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
if input(msg).lower() != "y":
print("Leaving file in place. Exiting.")
os.rmdir(tmp_dir)
return
# Extract each artist's lyrics in json format
all_lyrics = {'artists': []}
for n, artist in enumerate(artists):
if isinstance(artist, Artist):
all_lyrics['artists'].append({})
f = "tmp_{n}_{a}".format(n=count + n,
a=artist.name.replace(" ", ""))
tmp_file = os.path.join(tmp_dir, f)
if self.verbose:
print(tmp_file)
all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)
# Save all of the lyrics
with open(filename + '.json', 'w') as outfile:
json.dump(all_lyrics, outfile)
# Delete the temporary directory
shutil.rmtree(tmp_dir)
elapsed = (time.time() - start) / 60 / 60
print("Time elapsed: {t} hours".format(t=elapsed))
|
johnwmillr/LyricsGenius
|
lyricsgenius/api.py
|
Genius.save_artists
|
python
|
def save_artists(self, artists, filename="artist_lyrics", overwrite=False):
    """Save lyrics from multiple Artist objects into one JSON file.

    :param artists: Artist object, or list of Artist objects, to save lyrics from
    :param filename: Name of output file (".json" is appended)
    :param overwrite: Overwrites preexisting file if True; otherwise prompts
    """
    # Accept a single Artist as well as a list of them.
    if isinstance(artists, Artist):
        artists = [artists]
    # Create a temporary directory for lyrics
    start = time.time()
    tmp_dir = 'tmp_lyrics'
    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)
        count = 0
    else:
        count = len(os.listdir(tmp_dir))
    # Check if file already exists; interactive confirmation unless overwrite.
    if os.path.isfile(filename + ".json") and not overwrite:
        msg = "{f} already exists. Overwrite?\n(y/n): ".format(f=filename)
        if input(msg).lower() != "y":
            print("Leaving file in place. Exiting.")
            os.rmdir(tmp_dir)
            return
    # Extract each artist's lyrics in json format
    all_lyrics = {'artists': []}
    for n, artist in enumerate(artists):
        if isinstance(artist, Artist):
            all_lyrics['artists'].append({})
            f = "tmp_{n}_{a}".format(n=count + n,
                                     a=artist.name.replace(" ", ""))
            tmp_file = os.path.join(tmp_dir, f)
            if self.verbose:
                print(tmp_file)
            # NOTE(review): tmp_file is never written to — the lyrics payload
            # comes from artist.save_lyrics(); the temp path looks vestigial.
            all_lyrics['artists'][-1] = artist.save_lyrics(overwrite=True)
    # Save all of the lyrics
    with open(filename + '.json', 'w') as outfile:
        json.dump(all_lyrics, outfile)
    # Delete the temporary directory
    shutil.rmtree(tmp_dir)
    elapsed = (time.time() - start) / 60 / 60
    print("Time elapsed: {t} hours".format(t=elapsed))
|
Save lyrics from multiple Artist objects as JSON object
:param artists: List of Artist objects to save lyrics from
:param filename: Name of output file (json)
:param overwrite: Overwrites preexisting file if True
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/api.py#L336-L381
| null |
class Genius(API):
    """User-level interface with the Genius.com API."""

    def __init__(self, client_access_token,
                 response_format='plain', timeout=5, sleep_time=0.5,
                 verbose=True, remove_section_headers=False,
                 skip_non_songs=True, excluded_terms=[],
                 replace_default_terms=False):
        """ Genius Client Constructor

        :param verbose: Turn printed messages on or off (bool)
        :param remove_section_headers: If True, removes [Chorus], [Bridge], etc. headers from lyrics
        :param skip_non_songs: If True, attempts to skip non-songs (e.g. track listings)
        :param excluded_terms: (list) extra terms for flagging results as non-lyrics
        :param replace_default_terms: if True, replaces default excluded terms with user's
        """
        # NOTE(review): excluded_terms=[] is a mutable default argument stored
        # directly on the instance; _result_is_lyrics can mutate the shared
        # default via extend() — a None sentinel would be safer.
        super().__init__(client_access_token, response_format, timeout, sleep_time)
        self.verbose = verbose
        self.remove_section_headers = remove_section_headers
        self.skip_non_songs = skip_non_songs
        self.excluded_terms = excluded_terms
        self.replace_default_terms = replace_default_terms

    def _scrape_song_lyrics_from_url(self, url):
        """ Use BeautifulSoup to scrape song info off of a Genius song URL

        :param url: URL for the web page to scrape lyrics from
        """
        page = requests.get(url)
        if page.status_code == 404:
            return None
        # Scrape the song lyrics from the HTML
        html = BeautifulSoup(page.text, "html.parser")
        div = html.find("div", class_="lyrics")
        if not div:
            return None  # Sometimes the lyrics section isn't found
        # Scrape lyrics if proper section was found on page
        lyrics = div.get_text()
        if self.remove_section_headers:  # Remove [Verse], [Bridge], etc.
            # NOTE(review): non-raw patterns — '\[' is an invalid str escape
            # (DeprecationWarning since 3.6); prefer r'...' literals.
            lyrics = re.sub('(\[.*?\])*', '', lyrics)
            lyrics = re.sub('\n{2}', '\n', lyrics)  # Gaps between verses
        return lyrics.strip("\n")

    def _clean_str(self, s):
        """ Returns a lowercase string with punctuation and bad chars removed

        :param s: string to clean
        """
        # Also drops zero-width spaces (U+200B) found in scraped titles.
        return s.translate(str.maketrans('', '', punctuation)).replace('\u200b', " ").strip().lower()

    def _result_is_lyrics(self, song_title):
        """ Returns False if result from Genius is not actually song lyrics
        Set the `excluded_terms` and `replace_default_terms` as
        instance variables within the Genius class.
        """
        default_terms = ['track\\s?list', 'album art(work)?', 'liner notes',
                         'booklet', 'credits', 'interview', 'skit',
                         'instrumental', 'setlist']
        if self.excluded_terms:
            if self.replace_default_terms:
                default_terms = self.excluded_terms
            else:
                default_terms.extend(self.excluded_terms)
        # One big case-insensitive alternation over all excluded terms.
        expression = r"".join(["({})|".format(term) for term in default_terms]).strip('|')
        regex = re.compile(expression, re.IGNORECASE)
        return not regex.search(self._clean_str(song_title))

    def _get_item_from_search_response(self, response, type_):
        """ Returns either a Song or Artist result from search_genius_web """
        # Sort so that sections of the requested type come first.
        sections = sorted(response['sections'],
                          key=lambda sect: sect['type'] == type_,
                          reverse=True)
        for section in sections:
            hits = [hit for hit in section['hits'] if hit['type'] == type_]
            if hits:
                return hits[0]['result']

    def _result_is_match(self, result, title, artist=None):
        """ Returns True if search result matches searched song """
        result_title = self._clean_str(result['title'])
        title_is_match = result_title == self._clean_str(title)
        if not artist:
            return title_is_match
        result_artist = self._clean_str(result['primary_artist']['name'])
        return title_is_match and result_artist == self._clean_str(artist)

    def search_song(self, title, artist="", get_full_info=True):
        """ Search Genius.com for lyrics to a specific song

        :param title: Song title to search for
        :param artist: Name of the artist
        :param get_full_info: Get full info for each song (slower)
        """
        # Search the Genius API for the specified song
        if self.verbose:
            if artist:
                print('Searching for "{s}" by {a}...'.format(s=title, a=artist))
            else:
                print('Searching for "{s}"...'.format(s=title))
        search_term = "{s} {a}".format(s=title, a=artist).strip()
        response = self.search_genius_web(search_term)
        # Otherwise, move forward with processing the search results
        result = self._get_item_from_search_response(response, type_="song")
        # Exit search if there were no results returned from API
        if not result:
            if self.verbose:
                print("No results found for: '{s}'".format(s=search_term))
            return None
        # Reject non-songs (Liner notes, track lists, etc.)
        valid = self._result_is_lyrics(result['title']) if self.skip_non_songs else True
        if not valid:
            if self.verbose:
                print('Specified song does not contain lyrics. Rejecting.')
            return None
        # Download full song info (an API call) unless told not to by user
        song_info = result.copy()
        if get_full_info:
            song_info.update(self.get_song(result['id'])['song'])
        lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
        # Skip results when URL is a 404 or lyrics are missing
        if not lyrics:
            if self.verbose:
                print('Specified song does not have a valid URL with lyrics. Rejecting.')
            return None
        # Return a Song object with lyrics if we've made it this far
        song = Song(song_info, lyrics)
        if self.verbose:
            print('Done.')
        return song

    def search_artist(self, artist_name, max_songs=None,
                      sort='popularity', per_page=20, get_full_info=True):
        """Search Genius.com for songs by the specified artist.
        Returns an Artist object containing artist's songs.

        :param artist_name: Name of the artist to search for
        :param max_songs: Maximum number of songs to search for
        :param sort: Sort by 'title' or 'popularity'
        :param per_page: Number of results to return per search page
        :param get_full_info: Get full info for each song (slower)
        """
        if self.verbose:
            print('Searching for songs by {0}...\n'.format(artist_name))
        # Perform a Genius API search for the artist
        found_artist = None
        response = self.search_genius_web(artist_name)
        found_artist = self._get_item_from_search_response(response, type_="artist")
        # Exit the search if we couldn't find an artist by the given name
        if not found_artist:
            if self.verbose:
                print("No results found for '{a}'.".format(a=artist_name))
            return None
        # Assume the top search result is the intended artist
        artist_id = found_artist['id']
        artist_info = self.get_artist(artist_id)
        found_name = artist_info['artist']['name']
        if found_name != artist_name:
            if self.verbose:
                print("Changing artist name to '{a}'".format(a=found_name))
            artist_name = found_name
        # Create the Artist object
        artist = Artist(artist_info)
        # Download each song by artist, stored as Song objects in Artist object
        page = 1
        reached_max_songs = False
        while not reached_max_songs:
            songs_on_page = self.get_artist_songs(artist_id, sort, per_page, page)
            # Loop through each song on page of search results
            for song_info in songs_on_page['songs']:
                # Check if song is valid (e.g. has title, contains lyrics)
                has_title = ('title' in song_info)
                # NOTE(review): song_info['title'] is read before has_title is
                # checked — a song dict with no 'title' key would raise KeyError
                # here rather than be skipped. Confirm upstream always sets it.
                has_lyrics = self._result_is_lyrics(song_info['title'])
                valid = has_title and (has_lyrics or (not self.skip_non_songs))
                # Reject non-song results (e.g. Linear Notes, Tracklists, etc.)
                if not valid:
                    if self.verbose:
                        s = song_info['title'] if has_title else "MISSING TITLE"
                        print('"{s}" is not valid. Skipping.'.format(s=s))
                    continue
                # Create the Song object from lyrics and metadata
                lyrics = self._scrape_song_lyrics_from_url(song_info['url'])
                if get_full_info:
                    info = self.get_song(song_info['id'])
                else:
                    info = {'song': song_info}
                song = Song(info, lyrics)
                # Attempt to add the Song to the Artist
                result = artist.add_song(song, verbose=False)
                if result == 0 and self.verbose:
                    print('Song {n}: "{t}"'.format(n=artist.num_songs,
                                                   t=song.title))
                # Exit search if the max number of songs has been met
                reached_max_songs = max_songs and artist.num_songs >= max_songs
                if reached_max_songs:
                    if self.verbose:
                        print('\nReached user-specified song limit ({m}).'.format(m=max_songs))
                    break
            # Move on to next page of search results
            page = songs_on_page['next_page']
            if page is None:
                break  # Exit search when last page is reached
        if self.verbose:
            print('Done. Found {n} songs.'.format(n=artist.num_songs))
        return artist
|
johnwmillr/LyricsGenius
|
lyricsgenius/song.py
|
Song.to_dict
|
python
|
def to_dict(self):
    """Serialize the song's main fields into a plain dictionary.

    Used by save_lyrics when building the JSON payload.

    :return: dict with keys 'title', 'album', 'year', 'lyrics', 'image'
    """
    fields = (('title', self.title),
              ('album', self.album),
              ('year', self.year),
              ('lyrics', self.lyrics),
              ('image', self.song_art_image_url))
    return dict(fields)
|
Create a dictionary from the song object
Used in save_lyrics to create json object
:return: Dictionary
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/song.py#L83-L94
| null |
class Song(object):
    """A song from the Genius.com database."""

    def __init__(self, json_dict, lyrics=''):
        """ Song Constructor

        Properties:
            title: Title of the song.
            artist: Primary artist on the song.
            lyrics: Full set of song lyrics.
            album: Name of the album the song is on.
            year: Year the song was released.

        Methods:
            save_lyrics: Save the song lyrics to a JSON or TXT file.
        """
        # Accept either the raw API payload or one wrapped in a 'song' key.
        self._body = json_dict['song'] if 'song' in json_dict else json_dict
        self._body['lyrics'] = lyrics
        self._url = self._body['url']
        self._api_path = self._body['api_path']
        self._id = self._body['id']

    @property
    def title(self):
        return self._body.get('title')

    @property
    def artist(self):
        # Returns None when there is no primary_artist entry.
        primary = self._body.get('primary_artist')
        if primary:
            return primary.get('name')

    @property
    def lyrics(self):
        return self._body.get('lyrics')

    @property
    def album(self):
        album = self._body.get('album')
        if album:
            return album.get('name')

    @property
    def year(self):
        return self._body.get('release_date')

    @property
    def url(self):
        return self._body.get('url')

    @property
    def album_url(self):
        album = self._body.get('album')
        if album:
            return album.get('url')

    @property
    def featured_artists(self):
        return self._body.get('featured_artists')

    @property
    def media(self):
        return self._body.get('media')

    @property
    def writer_artists(self):
        """List of artists credited as writers"""
        return self._body.get('writer_artists')

    @property
    def song_art_image_url(self):
        return self._body.get('song_art_image_url')

    def _sanitize_filename(self, f):
        # Keep only alphanumerics plus space/dot/underscore for safe filenames.
        keepchars = (" ", ".", "_")
        return "".join(c for c in f if c.isalnum() or c in keepchars).rstrip()

    def save_lyrics(self, filename=None, extension='json', verbose=True,
                    overwrite=None, binary_encoding=False):
        """Allows user to save song lyrics from Song object to a .json or .txt file."""
        extension = extension.lstrip(".")
        assert (extension == 'json') or (extension == 'txt'), "format_ must be JSON or TXT"
        # Determine the filename
        if filename:
            for ext in ["txt", "TXT", "json", "JSON"]:
                filename = filename.replace("." + ext, "")
            filename += "." + extension
        else:
            filename = "Lyrics_{}_{}.{}".format(self.artist.replace(" ", ""),
                                                self.title.replace(" ", ""),
                                                extension).lower()
        filename = self._sanitize_filename(filename)
        # Check if file already exists; prompt interactively unless overwrite.
        write_file = False
        if not os.path.isfile(filename):
            write_file = True
        elif overwrite:
            write_file = True
        else:
            if input("{} already exists. Overwrite?\n(y/n): ".format(filename)).lower() == 'y':
                write_file = True
        # Format lyrics as either .txt or .json
        if extension == 'json':
            lyrics_to_write = {'songs': [], 'artist': self.artist}
            # NOTE(review): relies on self.to_dict(), which is not defined in
            # this block — confirm it exists on the full class.
            lyrics_to_write['songs'].append(self.to_dict())
        else:
            lyrics_to_write = self.lyrics
        if binary_encoding:
            lyrics_to_write = lyrics_to_write.encode('utf8')
        # Write the lyrics to either a .json or .txt file
        if write_file:
            with open(filename, 'wb' if binary_encoding else 'w') as lyrics_file:
                if extension == 'json':
                    json.dump(lyrics_to_write, lyrics_file)
                else:
                    lyrics_file.write(lyrics_to_write)
            if verbose:
                print('Wrote {} to {}.'.format(self.title, filename))
        else:
            if verbose:
                print('Skipping file save.\n')
        return lyrics_to_write

    def __str__(self):
        """Return a string representation of the Song object."""
        # Truncate long lyrics to the first 100 characters with an ellipsis.
        if len(self.lyrics) > 100:
            lyr = self.lyrics[:100] + "..."
        else:
            lyr = self.lyrics[:100]
        return '"{title}" by {artist}:\n    {lyrics}'.format(
            title=self.title, artist=self.artist, lyrics=lyr.replace('\n', '\n    '))

    def __repr__(self):
        return repr((self.title, self.artist))

    def __cmp__(self, other):
        # NOTE(review): __cmp__ and the cmp builtin only exist on Python 2;
        # on Python 3 calling this raises NameError and the hook is ignored.
        return cmp(self.title, other.title) and cmp(self.artist, other.artist) and cmp(self.lyrics, other.lyrics)
|
johnwmillr/LyricsGenius
|
lyricsgenius/song.py
|
Song.save_lyrics
|
python
|
def save_lyrics(self, filename=None, extension='json', verbose=True,
                overwrite=None, binary_encoding=False):
    """Save this song's lyrics to a .json or .txt file.

    :param filename: target file name; derived from artist/title when None
    :param extension: 'json' or 'txt' (leading dot tolerated)
    :param verbose: print progress messages when True
    :param overwrite: overwrite an existing file without prompting when truthy
    :param binary_encoding: write UTF-8 bytes instead of text
    :return: the payload that was (or would have been) written
    """
    extension = extension.lstrip(".")
    assert (extension == 'json') or (extension == 'txt'), "format_ must be JSON or TXT"
    # Determine the filename
    if filename:
        for ext in ["txt", "TXT", "json", "JSON"]:
            filename = filename.replace("." + ext, "")
        filename += "." + extension
    else:
        filename = "Lyrics_{}_{}.{}".format(self.artist.replace(" ", ""),
                                            self.title.replace(" ", ""),
                                            extension).lower()
    filename = self._sanitize_filename(filename)
    # Check if file already exists; interactive prompt unless overwrite.
    write_file = False
    if not os.path.isfile(filename):
        write_file = True
    elif overwrite:
        write_file = True
    else:
        if input("{} already exists. Overwrite?\n(y/n): ".format(filename)).lower() == 'y':
            write_file = True
    # Format lyrics as either .txt or .json
    if extension == 'json':
        lyrics_to_write = {'songs': [], 'artist': self.artist}
        lyrics_to_write['songs'].append(self.to_dict())
    else:
        lyrics_to_write = self.lyrics
    if binary_encoding:
        lyrics_to_write = lyrics_to_write.encode('utf8')
    # Write the lyrics to either a .json or .txt file
    if write_file:
        with open(filename, 'wb' if binary_encoding else 'w') as lyrics_file:
            if extension == 'json':
                json.dump(lyrics_to_write, lyrics_file)
            else:
                lyrics_file.write(lyrics_to_write)
        if verbose:
            print('Wrote {} to {}.'.format(self.title, filename))
    else:
        if verbose:
            print('Skipping file save.\n')
    return lyrics_to_write
|
Allows user to save song lyrics from Song object to a .json or .txt file.
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/song.py#L100-L149
|
[
"def to_dict(self):\n \"\"\"\n Create a dictionary from the song object\n Used in save_lyrics to create json object\n\n :return: Dictionary\n \"\"\"\n return dict({'title': self.title,\n 'album': self.album,\n 'year': self.year,\n 'lyrics': self.lyrics,\n 'image': self.song_art_image_url})\n",
"def _sanitize_filename(self, f):\n keepchars = (\" \", \".\", \"_\")\n return \"\".join(c for c in f if c.isalnum() or c in keepchars).rstrip()\n"
] |
class Song(object):
    """A song from the Genius.com database."""

    def __init__(self, json_dict, lyrics=''):
        """ Song Constructor

        Properties:
            title: Title of the song.
            artist: Primary artist on the song.
            lyrics: Full set of song lyrics.
            album: Name of the album the song is on.
            year: Year the song was released.

        Methods:
            save_lyrics: Save the song lyrics to a JSON or TXT file.
        """
        # Accept either the raw API payload or one wrapped in a 'song' key.
        self._body = json_dict['song'] if 'song' in json_dict else json_dict
        self._body['lyrics'] = lyrics
        self._url = self._body['url']
        self._api_path = self._body['api_path']
        self._id = self._body['id']

    @property
    def title(self):
        return self._body.get('title')

    @property
    def artist(self):
        primary = self._body.get('primary_artist')
        if primary:
            return primary.get('name')

    @property
    def lyrics(self):
        return self._body.get('lyrics')

    @property
    def album(self):
        album = self._body.get('album')
        if album:
            return album.get('name')

    @property
    def year(self):
        return self._body.get('release_date')

    @property
    def url(self):
        return self._body.get('url')

    @property
    def album_url(self):
        album = self._body.get('album')
        if album:
            return album.get('url')

    @property
    def featured_artists(self):
        return self._body.get('featured_artists')

    @property
    def media(self):
        return self._body.get('media')

    @property
    def writer_artists(self):
        """List of artists credited as writers"""
        return self._body.get('writer_artists')

    @property
    def song_art_image_url(self):
        return self._body.get('song_art_image_url')

    def to_dict(self):
        """
        Create a dictionary from the song object
        Used in save_lyrics to create json object

        :return: Dictionary
        """
        return dict({'title': self.title,
                     'album': self.album,
                     'year': self.year,
                     'lyrics': self.lyrics,
                     'image': self.song_art_image_url})

    def _sanitize_filename(self, f):
        # Keep only alphanumerics plus space/dot/underscore for safe filenames.
        keepchars = (" ", ".", "_")
        return "".join(c for c in f if c.isalnum() or c in keepchars).rstrip()

    def __str__(self):
        """Return a string representation of the Song object."""
        # Truncate long lyrics to the first 100 characters with an ellipsis.
        if len(self.lyrics) > 100:
            lyr = self.lyrics[:100] + "..."
        else:
            lyr = self.lyrics[:100]
        return '"{title}" by {artist}:\n    {lyrics}'.format(
            title=self.title, artist=self.artist, lyrics=lyr.replace('\n', '\n    '))

    def __repr__(self):
        return repr((self.title, self.artist))

    def __cmp__(self, other):
        # NOTE(review): __cmp__/cmp are Python-2-only; this hook is ignored
        # on Python 3 and calling it there raises NameError.
        return cmp(self.title, other.title) and cmp(self.artist, other.artist) and cmp(self.lyrics, other.lyrics)
|
johnwmillr/LyricsGenius
|
lyricsgenius/artist.py
|
Artist.add_song
|
python
|
def add_song(self, new_song, verbose=True):
    """Add a Song object to this Artist.

    :param new_song: Song to add; must belong to this artist and not be a duplicate
    :param verbose: print a message when the song is rejected
    :return: 0 on success, 1 on failure (duplicate title or wrong artist)
    """
    is_duplicate = any(existing.title == new_song.title for existing in self._songs)
    if is_duplicate:
        if verbose:
            print('{s} already in {a}, not adding song.'.format(s=new_song.title,
                                                                a=self.name))
        return 1  # Failure
    if new_song.artist != self.name:
        if verbose:
            print("Can't add song by {b}, artist must be {a}.".format(b=new_song.artist,
                                                                      a=self.name))
        return 1  # Failure
    self._songs.append(new_song)
    self._num_songs += 1
    return 0  # Success
|
Add a Song object to the Artist object
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/artist.py#L50-L65
| null |
class Artist(object):
    """An artist with songs from the Genius.com database."""

    def __init__(self, json_dict):
        """ Artist Constructor

        Properties:
            name: Artist name.
            image_url: URL to the artist image on Genius.com
            songs: List of the artist's Song objects
            num_songs: Number of songs in the Artist object

        Methods:
            add_song: Add a song to the Artist object
            save_lyrics: Save the lyrics to a JSON or TXT file
        """
        self._body = json_dict['artist']
        self._url = self._body['url']
        self._api_path = self._body['api_path']
        self._id = self._body['id']
        self._songs = []
        self._num_songs = len(self._songs)
        self._songs_dropped = 0

    def __len__(self):
        # NOTE(review): always returns 1 regardless of song count — confirm
        # this is intentional rather than meaning len(self._songs).
        return 1

    @property
    def name(self):
        return self._body['name']

    @property
    def image_url(self):
        # Returns None implicitly when the key is absent.
        if 'image_url' in self._body:
            return self._body['image_url']

    @property
    def songs(self):
        return self._songs

    @property
    def num_songs(self):
        return self._num_songs

    def get_song(self, song_name):
        """Search Genius.com for *song_name* and add it to artist"""
        raise NotImplementedError("I need to figure out how to allow Artist() to access Genius.search_song().")
        # song = Genius.search_song(song_name, self.name)
        # self.add_song(song)
        # return

    # TODO: define an export_to_json() method
    def save_lyrics(self, extension='json', overwrite=False,
                    verbose=True, binary_encoding=False):
        """Allows user to save all lyrics within an Artist object"""
        extension = extension.lstrip(".")
        assert (extension == 'json') or (extension == 'txt'), "format_ must be JSON or TXT"
        for song in self.songs:
            song.save_lyrics(extension=extension, overwrite=overwrite, verbose=verbose, binary_encoding=binary_encoding)

    def __str__(self):
        """Return a string representation of the Artist object."""
        # Drop the trailing 's' from "songs" when there is exactly one song.
        msg = "{name}, {num} songs".format(name=self.name, num=self._num_songs)
        msg = msg[:-1] if self._num_songs == 1 else msg
        return msg

    def __repr__(self):
        # Same singular/plural trimming as __str__.
        msg = "{num} songs".format(num=self._num_songs)
        msg = repr((self.name, msg[:-1])) if self._num_songs == 1 else repr((self.name, msg))
        return msg
|
johnwmillr/LyricsGenius
|
lyricsgenius/artist.py
|
Artist.save_lyrics
|
python
|
def save_lyrics(self, extension='json', overwrite=False,
                verbose=True, binary_encoding=False):
    """Save the lyrics of every song held by this Artist.

    Delegates to each Song's save_lyrics with the same options.
    """
    ext = extension.lstrip(".")
    assert ext in ('json', 'txt'), "format_ must be JSON or TXT"
    for track in self.songs:
        track.save_lyrics(extension=ext, overwrite=overwrite,
                          verbose=verbose, binary_encoding=binary_encoding)
|
Allows user to save all lyrics within an Artist object
|
train
|
https://github.com/johnwmillr/LyricsGenius/blob/e36482f7c42235037f3b9b7013edcd54141124e3/lyricsgenius/artist.py#L76-L83
| null |
class Artist(object):
    """An artist with songs from the Genius.com database."""

    def __init__(self, json_dict):
        """ Artist Constructor

        Properties:
            name: Artist name.
            image_url: URL to the artist image on Genius.com
            songs: List of the artist's Song objects
            num_songs: Number of songs in the Artist object

        Methods:
            add_song: Add a song to the Artist object
            save_lyrics: Save the lyrics to a JSON or TXT file
        """
        self._body = json_dict['artist']
        self._url = self._body['url']
        self._api_path = self._body['api_path']
        self._id = self._body['id']
        self._songs = []
        self._num_songs = len(self._songs)
        self._songs_dropped = 0

    def __len__(self):
        # NOTE(review): always 1 regardless of song count — confirm intent.
        return 1

    @property
    def name(self):
        return self._body['name']

    @property
    def image_url(self):
        # Returns None implicitly when the key is absent.
        if 'image_url' in self._body:
            return self._body['image_url']

    @property
    def songs(self):
        return self._songs

    @property
    def num_songs(self):
        return self._num_songs

    def add_song(self, new_song, verbose=True):
        """Add a Song object to the Artist object"""
        # Reject duplicate titles.
        if any([song.title == new_song.title for song in self._songs]):
            if verbose:
                print('{s} already in {a}, not adding song.'.format(s=new_song.title,
                                                                    a=self.name))
            return 1  # Failure
        # Only accept songs whose primary artist matches this Artist.
        if new_song.artist == self.name:
            self._songs.append(new_song)
            self._num_songs += 1
            return 0  # Success
        if verbose:
            print("Can't add song by {b}, artist must be {a}.".format(b=new_song.artist,
                                                                      a=self.name))
        return 1  # Failure

    def get_song(self, song_name):
        """Search Genius.com for *song_name* and add it to artist"""
        raise NotImplementedError("I need to figure out how to allow Artist() to access Genius.search_song().")
        # song = Genius.search_song(song_name, self.name)
        # self.add_song(song)
        # return

    # TODO: define an export_to_json() method
    def __str__(self):
        """Return a string representation of the Artist object."""
        # Drop the trailing 's' from "songs" when there is exactly one song.
        msg = "{name}, {num} songs".format(name=self.name, num=self._num_songs)
        msg = msg[:-1] if self._num_songs == 1 else msg
        return msg

    def __repr__(self):
        # Same singular/plural trimming as __str__.
        msg = "{num} songs".format(num=self._num_songs)
        msg = repr((self.name, msg[:-1])) if self._num_songs == 1 else repr((self.name, msg))
        return msg
|
michaelliao/sinaweibopy
|
snspy.py
|
_parse_json
|
python
|
def _parse_json(s):
    '''
    Parse json string into JsonDict.

    >>> r = _parse_json(r'{"name":"Michael","score":95}')
    >>> r.name
    u'Michael'
    >>> r['score']
    95
    '''
    # FIX: use .items() instead of the Python-2-only .iteritems();
    # behavior is identical on Python 2 and the code also works on Python 3.
    # object_hook receives each decoded JSON object as a dict, which is
    # rebuilt as a JsonDict so results support attribute access.
    return json.loads(s, object_hook=lambda pairs: JsonDict(pairs.items()))
|
Parse json string into JsonDict.
>>> r = _parse_json(r'{"name":"Michael","score":95}')
>>> r.name
u'Michael'
>>> r['score']
95
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L77-L87
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.0.0'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for SNS API using OAuth 2. Require Python 2.6/2.7.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import urlparse
import gzip
import logging
import mimetypes
import collections
class JsonDict(dict):
    """A dict whose keys are also readable and writable as attributes.

    Attribute access on a missing key raises AttributeError, while normal
    subscript access on a missing key still raises KeyError, so the object
    behaves both like a plain dict and like a simple record.
    """

    def __getattr__(self, attr):
        if attr in self:
            return self[attr]
        raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)

    def __setattr__(self, attr, value):
        self[attr] = value
class APIError(StandardError):
    '''
    raise APIError if receiving json message indicating failure.
    '''
    # NOTE: StandardError exists only on Python 2; this module targets
    # Python 2.6/2.7 (see module docstring).

    def __init__(self, error_code, error, request):
        self.error_code = error_code  # error code returned by the SNS API
        self.error = error            # human-readable error message
        self.request = request        # request identifier from the API response
        StandardError.__init__(self, error)

    def __str__(self):
        return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
def _encode_params(**kw):
    '''
    Do url-encode parameters

    >>> _encode_params(a=1, b='R&D')
    'a=1&b=R%26D'
    >>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
    'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
    '''
    def _encode(L, k, v):
        # unicode values are UTF-8 encoded before quoting; iterables are
        # expanded into repeated key=value pairs (Python 2 code).
        if isinstance(v, unicode):
            L.append('%s=%s' % (k, urllib.quote(v.encode('utf-8'))))
        elif isinstance(v, str):
            L.append('%s=%s' % (k, urllib.quote(v)))
        elif isinstance(v, collections.Iterable):
            for x in v:
                _encode(L, k, x)
        else:
            L.append('%s=%s' % (k, urllib.quote(str(v))))
    args = []
    for k, v in kw.iteritems():
        _encode(args, k, v)
    return '&'.join(args)
def _encode_multipart(**kw):
    ' build a multipart/form-data body with randomly generated boundary '
    # Boundary is derived from the current time in ms; returns (body, boundary).
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    for k, v in kw.iteritems():
        data.append('--%s' % boundary)
        if hasattr(v, 'read'):
            # file-like object: inline its content with guessed content type
            filename = getattr(v, 'name', '')
            content = v.read()
            data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
            data.append('Content-Length: %d' % len(content))
            data.append('Content-Type: %s\r\n' % _guess_content_type(filename))
            data.append(content)
        else:
            data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
            data.append(v.encode('utf-8') if isinstance(v, unicode) else v)
    data.append('--%s--\r\n' % boundary)
    return '\r\n'.join(data), boundary
def _guess_content_type(url):
'''
Guess content type by url.
>>> _guess_content_type('http://test/A.HTML')
'text/html'
>>> _guess_content_type('http://test/a.jpg')
'image/jpeg'
>>> _guess_content_type('/path.txt/aaa')
'application/octet-stream'
'''
OCTET_STREAM = 'application/octet-stream'
n = url.rfind('.')
if n == -1:
return OCTET_STREAM
return mimetypes.types_map.get(url[n:].lower(), OCTET_STREAM)
# HTTP method markers used by _http(); 'UPLOAD' selects a multipart POST.
_HTTP_GET = 'GET'
_HTTP_POST = 'POST'
_HTTP_UPLOAD = 'UPLOAD'
def _read_http_body(http_obj):
using_gzip = http_obj.headers.get('Content-Encoding', '') == 'gzip'
body = http_obj.read()
if using_gzip:
gzipper = gzip.GzipFile(fileobj=StringIO(body))
fcontent = gzipper.read()
gzipper.close()
return fcontent
return body
def _http(method, url, headers=None, **kw):
    '''
    Send an HTTP request and return the (decompressed) response body.

    method  -- _HTTP_GET, _HTTP_POST or _HTTP_UPLOAD (multipart POST).
    url     -- target URL without query string.
    headers -- optional dict of extra request headers.
    kw      -- request parameters; file-like values only for UPLOAD mode.
    '''
    boundary = None
    if method == _HTTP_UPLOAD:
        # use the module constants consistently (the old code mixed
        # string literals and _HTTP_* constants)
        params, boundary = _encode_multipart(**kw)
    else:
        params = _encode_params(**kw)
    # GET carries parameters in the URL; POST/UPLOAD carry them in the body
    http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url
    http_body = None if method == _HTTP_GET else params
    # bug fix: routine request tracing belongs at DEBUG, not ERROR
    logging.debug('%s: %s' % (method, http_url))
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Accept-Encoding', 'gzip')
    if headers:
        for k, v in headers.iteritems():
            req.add_header(k, v)
    if boundary:
        req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    # the old `try: ... finally: pass` wrapper was a no-op and is removed
    resp = urllib2.urlopen(req, timeout=5)
    return _read_http_body(resp)
class SNSMixin(object):
    '''
    Base class for platform-specific OAuth2 mixins.  Stores the client
    credentials and declares the interface subclasses must implement.
    '''
    def __init__(self, app_key, app_secret, redirect_uri):
        self._client_id = app_key          # OAuth2 client id (app key)
        self._client_secret = app_secret   # OAuth2 client secret
        self._redirect_uri = redirect_uri  # default OAuth2 redirect URI
    def _prepare_api(self, method, path, access_token, **kw):
        '''Return (method, url, headers, params) for an API call.'''
        raise StandardError('Subclass must implement \'_prepare_api\' method.')
    def on_http_error(self, e):
        '''
        Translate an HTTP error whose body is a JSON failure payload into
        an APIError; re-raise the original error otherwise.
        '''
        try:
            r = _parse_json(_read_http_body(e))
        except Exception:
            # bug fix: a bare `except:` also swallowed SystemExit and
            # KeyboardInterrupt; only parse failures should be ignored.
            r = None
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        raise e
class SinaWeiboMixin(SNSMixin):
    '''Sina Weibo flavour of the OAuth2 flow and API URL layout.'''
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        Return the authorization URL that the user should be redirected to.
        redirect_uri now defaults to '' (falls back to the configured URI),
        consistent with the QQMixin signature; backward compatible.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://api.weibo.com/oauth2/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                        response_type=response_type,
                        redirect_uri=redirect, **kw)
    def _prepare_api(self, method, path, access_token, **kw):
        '''Return (method, url, headers, params) for a Weibo API call.'''
        headers = None
        if access_token:
            headers = {'Authorization': 'OAuth2 %s' % access_token}
        if '/remind/' in path:
            # the remind API lives on a dedicated host:
            return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
        if method == 'POST' and 'pic' in kw:
            # a 'pic' parameter forces multipart upload mode:
            return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
        return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        The earlier of expires_in/remind_in is used as the expiry.
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, code=code, grant_type='authorization_code')
        r = _parse_json(resp_text)
        current = int(time.time())
        expires = r.expires_in + current
        remind_in = r.get('remind_in', None)
        if remind_in:
            rtime = int(remind_in) + current
            if rtime < expires:
                expires = rtime
        return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))
    def parse_signed_request(self, signed_request):
        '''
        parse signed request when using in-site app.
        Returns:
            dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
            or None if parse failed.
        '''
        def _b64_normalize(s):
            appendix = '=' * (4 - len(s) % 4)
            return s.replace('-', '+').replace('_', '/') + appendix
        sr = str(signed_request)
        logging.info('parse signed request: %s' % sr)
        enc_sig, enc_payload = sr.split('.', 1)
        sig = base64.b64decode(_b64_normalize(enc_sig))
        data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
        if data['algorithm'] != u'HMAC-SHA256':
            return None
        # bug fix: the secret is stored as self._client_secret (set by
        # SNSMixin.__init__); the old code read the non-existent
        # self.client_secret and raised AttributeError.
        expected_sig = hmac.new(self._client_secret, enc_payload, hashlib.sha256).digest()
        if expected_sig == sig:
            data.user_id = data.uid = data.get('user_id', None)
            data.access_token = data.get('oauth_token', None)
            expires = data.get('expires', None)
            if expires:
                data.expires = data.expires_in = time.time() + expires
            return data
        return None
class QQMixin(SNSMixin):
    # QQ (Tencent) flavour of the OAuth2 flow and API URL layout.
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        # fall back to the redirect URI configured at construction time
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                        response_type=response_type,
                        redirect_uri=redirect, **kw)
    def _prepare_api(self, method, path, access_token, **kw):
        # QQ expects the token and the consumer key as query parameters,
        # not as an Authorization header.
        kw['access_token'] = access_token
        kw['oauth_consumer_key'] = self._client_id
        return method, 'https://graph.qq.com/%s' % path, None, kw
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, code=code, grant_type='authorization_code')
        # QQ answers with a urlencoded string, not JSON
        return self._parse_access_token(resp_text)
    def refresh_access_token(self, refresh_token, redirect_uri=None):
        '''
        Refresh access token.
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                refresh_token=refresh_token,
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, grant_type='refresh_token')
        return self._parse_access_token(resp_text)
    # FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
    def _parse_access_token(self, resp_text):
        ' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
        r = self._qs2dict(resp_text)
        access_token = r.pop('access_token')
        # convert relative expires_in into an absolute unix timestamp
        expires = time.time() + float(r.pop('expires_in'))
        return JsonDict(access_token=access_token, expires=expires, **r)
    def _qs2dict(self, text):
        # parse_qs maps each key to a list; keep only the first value
        qs = urlparse.parse_qs(text)
        return dict(((k, v[0]) for k, v in qs.iteritems()))
    def get_openid(self, access_token):
        # the openid endpoint also answers with a urlencoded string
        resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
        r = self._qs2dict(resp_text)
        return r['openid']
class APIClient(object):
    '''
    API client using synchronized invocation.  Platform specifics are
    delegated to a mixin instance (SinaWeiboMixin, QQMixin, ...); remote
    endpoints are reached via attribute chaining, e.g.
    client.statuses.update.post(status=u'...').
    '''
    def __init__(self, mixin, app_key, app_secret, redirect_uri='', access_token='', expires=0.0):
        # `mixin` is the mixin *class*; it is instantiated here
        self._mixin = mixin(app_key, app_secret, redirect_uri)
        self._access_token = str(access_token)
        self._expires = expires
    def set_access_token(self, access_token, expires):
        '''Remember an access token and its absolute expiry (unix time).'''
        self._access_token = str(access_token)
        self._expires = float(expires)
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        return self._mixin.get_authorize_url(redirect_uri or self._mixin._redirect_uri, **kw)
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict:
        {
            "access_token": "your-access-token",
            "expires": 12345678, # represented using standard unix-epoch-time
            "uid": 1234 # other fields
        }
        The token is also remembered on this client.
        '''
        r = self._mixin.request_access_token(code, redirect_uri)
        self._access_token = r.access_token
        return r
    def refresh_token(self, refresh_token):
        '''
        Refresh the access token via the mixin.  Bug fix: the old code read
        self.auth_url / self.client_id / self.client_secret /
        self._parse_access_token, none of which exist on APIClient, so it
        always failed.  Only mixins providing refresh_access_token (e.g.
        QQMixin) support refreshing.
        '''
        r = self._mixin.refresh_access_token(refresh_token)
        self._access_token = r.access_token
        return r
    def is_expires(self):
        '''Return True when no token is set or the token has expired.'''
        # bug fix: the old code read self.access_token / self.expires, which
        # do not exist and fell through to __getattr__, yielding a truthy
        # _Callable -- the expiry check therefore never fired.
        return not self._access_token or time.time() > self._expires
    def call_api(self, http_method, http_path, **kw):
        '''Perform one API call and return the parsed JSON result.'''
        method, the_url, headers, params = self._mixin._prepare_api(http_method, http_path, self._access_token, **kw)
        logging.debug('Call API: %s: %s' % (method, the_url))
        try:
            resp = _http(method, the_url, headers, **params)
        except urllib2.HTTPError as e:
            # `as` syntax is valid on the 2.6/2.7 floor this module targets
            return self._mixin.on_http_error(e)
        r = _parse_json(resp)
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        return r
    def __getattr__(self, attr):
        # unknown attributes first try the mixin, then become API path parts
        if hasattr(self._mixin, attr):
            return getattr(self._mixin, attr)
        return _Callable(self, attr)
class _Executable(object):
def __init__(self, client, method, path):
self._client = client
self._method = method
self._path = path
def __call__(self, **kw):
return self._client.call_api(self._method, self._path, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, client, name):
self._client = client
self._name = name
def __getattr__(self, attr):
if attr == 'get':
return _Executable(self._client, 'GET', self._name)
if attr == 'post':
return _Executable(self._client, 'POST', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._client, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
if __name__ == '__main__':
    #import doctest
    #doctest.testmod()
    # Fill in real credentials before running any of the samples below.
    APP_KEY = '???'
    APP_SECRET = '???'
    access_token = '???'
    expires = 1393739173.5
#c = APIClient(QQMixin, APP_KEY, APP_SECRET, 'http://www.liaoxuefeng.com/auth/callback', access_token, expires)
#print c.get_openid(access_token)
#r = c.user.get_user_info.get(openid=openid)
# test get:
#r = c.statuses.home_timeline.get(count=10)
#print r
# test post:
#r = c.statuses.update.post(status=u'测试http post')
#print r
# test upload:
#r = c.statuses.upload.post(status=u'测试upload pic', pic=StringIO(base64.b64decode('iVBORw0KGgoAAAANSUhEUgAAAFAAAABQCAIAAAABc2X6AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAALPBJREFUeNqEfAmYVOWV9t1qr67qpXql6W6gAdmVZgfFFXEnGjXBKGPUmDijj8vEuCVqYsaMPnGNjtskOo8LiCbRRBExSDKKiiIq0DR023s3vVVvtd/1f79zbhVO/sz/19MPdN+697vfd75z3vOe5V45u2KFZNuSosi5nOT1SrouGYbk8Uiq6gQC+BPH8YtsmjhHfGtZ+NbxenHE8XjEVaoqmaas6/hT0jQHf+JMHMGw9JWED67CgPSVjN8VxZFl2TAwDr4Sg8uyOM228R++EjPBNBxHnOA4+EaMJv6TxUHcF5fgh77CTfGPQ5eLC2lkcRxn4iuMiani1vjdMBRxP8tSMN1QyL3A55PpbDmdlnGNzyeuxBrwwbe4El9hRFqVGC6bxWyEdGi1Mi1exrcYDQLC+RAEBsE8cByrxSzpRrhESaXE1DEViMPnc/x+sSj8sBwxGk1dpsWI2ePygoBwC/zgND5HUZRcTmYx0TrdO7K88C8JXcNBiYWHDwQP4eECHMS42IFQSOb9z2SwPLF4zJ7W6c4Di8QRWRZ7jtFxHOPiIL7FUJmMWAaLD5MjGbMeyVghJoGDuJy2DpeLcfAnzsEEcBf8jgt5rpAsjrPK4E+6o3uQhCvmBnlB3Hwj2nbWCPGD8UkjSAPxwzqD0bEw0mcxBEbExYYhp1JilrxRuFjThALTOa5S4BxMggbBjYUV4H6YE/7lGeOHF4ARsDB88C9Mg/cnPwc2LneikAj+py0VF/Iy+O7Yf1yO33EVzuS74Ka4tSJ0VgxApuSQyFwtowkr7nZDGbJZh7/je2MZJEJsslNUBGUT68cmk9U5pA5sjQUR4Bcb36bTWIaSSGBMVg2HrMAdGUdwR5qB2BysnzUfIuPVYvN5nZgSjmMxLGX+kOkKrQkGZVYEljt9xPkYHydDNxkL+CtIljECNixugyuxBllWsL34Ipt1zQBanUzCkoW2YDjMyTSV0VEX3khPXGvBPNjkfD6vBoUWuOJuC0+lYLrYB2wdxIezYMY4wurHukpw5d6OJirlcVHNZj0AC0iWrFSsgUGHAYUVmJbEB4+pNE0PC5FIHApLnaFInMTQB9vA4iGFcBjXeHRdy2TMbFYjDHN4W2DwZLcMsFiAYpp2IvF5KtWRyWDlEi0Sx2W2TMJqcT62iFRDJvx04YcBgj9QK8IwiffKcVRFycryYV1PY2QWDYsPy8NN2ZuwkuNesGRWfl4UCx3yxWmyrLnGAIHhB9JimyFFFYaqKFhkh2XtmDkzUF8/59Ch43t7GUvcW7KzwcohfkX5fVmZ8rOf1Xg84Ycfjg0MWDgOqWHSsC5gPuMCBABpYt9wI+wn9hDX0hqE4mJAlg5hIW4BA5vIZl9avLjirLOOdnU1bd0akGWbUZC8hkyKiam6fo5MVxwk2xS/E2KzMDXXBlj2vC3kaYTWAaINw0yndy1ZcuZjj02NRgePHk1fcUUoHreKizGizB6P9FlxnOFcLvid72y44AKMm2hpsZ55BgriEDyydblGhbuw5fO/hM9iWxiZyXmw8Qv/GQppivJRKLTm7rtPaGgAHKVaW6UPPpCCQd46eXLS1QUWPfsXfOX3y2TJvIWuMgrQwqlEJ8QX7G+YcuB3Qp20ac46+eRp0ShsYkp1tX/2bCmTASaJZWAbgkGvZXlzOSiuR1Vrq6tZkEX4Cj+WBTX1ZjIQu8K6Q5h3jBtgZrBqGA5tgrgvpgjgIN4iZplO29lscMaMOQ0NYvttO6Dr0GFhzMAaIkXCG2HNjNW0Kok8uUOI694lb3euwzzmrFn12bTIlsKyXN3cDE8idCWbVQ8fFvPDh
HB+LvcXRRmvra2amCjN5ToMI8TLkKTOcDiFBet6Ly4MBAABc2QZN7fZ1UP2BAHsHoWd094qRIkESvGHlBynlU9MZHM5P1YFBO3vZ67i+kUiSA6ZpOAUeZRiSxHnY3BmVhgZFirxSTgXaoxfgkHiHTJwX8XsgczB4JS//tV4/nnvwoXSSy+pw8M2Rvf5sHvbwuHEnXcuXbYsPT7e09rq9fuXLF/OUy3btKlzxoyxoaGy6dMD4XA6lxvfsiW8bRu7pWMcEHMiIxRGjlky6kBdyaTZssCfAt3d40ePFjc0OOSB2dbYoTL0sOG4rpugR6yFEZ4FhyPkO4UTFz4N/zJIwPt5PKosq6lUXNctyAZQ7ziT994LRbJNMwhAsm0tl4uYpm/dunVnnSWQoaxs3owZ0jc+RT7fyaed9s0j1uuvm6xyRBgdJoYs6Dx7FfNjZ8FcQFHgjzAfr65DdhIWjIlZllrg5/jHNC0akMksVmFHoxhHwbDM+XEv5t7kRDXmDxCk62CwQmhpLvdGWZm9enWktNQ2DBuKXVuL45rXa6VSmcHBXCoV6+yc295uHD6s1tVJ5E6djg65pkaKRIRMW1udo0fFPY4ckVIp58sv7S++kBmZMAMYHvtP8nNeQkqm8ZbjWMTDvB6PbpoDAELLata0GDktJxr9dMUKfdcuaN9oOFwiyzMmJsp9PtDUVCYTgGu0LIOdHFE9l7GQAQs9gvHnFi2CbGzGNFyQyWi2vXnGjBm//vWy6dOl//2TlKTczTdH33/frq0VwK5p6tdft6xfP+3BB+E2jI0bnb17MT/hjaC3uH006hTILXNSUjZvMNg8Nvbl1Knlth0dHa00jCpsrNe717L2zJ5dvHIltlcpKzv/e9/z0rZPGEZXZ+fk2FhJRQW29+iePXNffvnLVKp/xYq6gYHj9u2rg2IGAg5zCiKeQncYzDweTSKyjmkx/YC8rXh8Vn19E68Wm3P4sJRMCkoEzccWjY9L06bJVVXheDzc34+vpI4ON3iQ5a6BgalMeKJRH9QP1hoIpMDPsG+67ocJ+XymQAgK5jwenLN7cnLvpk3rr7wSujrY07O/s7P/nXfmffBByw9+cOlNN5XnuVrhE/V4Fs6cWfhzYWPj8KFDdmnp1TfdNGGaB957L/7444t6ehxGZgpXZAoE2EVpTJiY2bsuKxKZv3evfsstSjKpdHVh/XBCKuSkaRYUAUbu9do1NfrQ0N/S6cSsWeXJZGxiwjSMIUUZmT8/RPrTfe65n8XjxYnECFRx7lyoIzStaHLyhD17ai3LJgOGWg7r+v4LL/zhrbcS1ZCmV1VJS5emgHznnnt+W1txV5cE9eEAq0AeoZnYA0wVKgndOXiweNu206ZMsZuaSk466cT16/sWLRq56qrynh4zEhG0HMZM8YmwXAHdK1ZwqCHGgmnRL1ByJ5GAxx+Gp/Z6cWhEVaFaUbhlTYNfnZrLvTRnTsntt8+fOzeXTPa3teV0PVBcfPzChVFyfdBXuKUEPKrjVCP2IG6clqSBP/95yl13AQ6wA4DXLxXF+s//bAL+4zM6au/Z47S1OXv22IcPA3Vs6NSUKfCFRizm3H9/CE5+bMy4+Wb700/lfHwvYlIgvKZh8MyTT1YuW4Zv4ps3h3/xC/gXh3bRIafNYY/GvEximgZF55AILKKkZI9tf3reeTXFxWnLKp01y6coR/r6TJ8vqCjGc8/Vrl9/yoknkgsqm11f/3eKB0Us0vWiAh+ig8FcbjpEwz4fik2gWlJaKr7r7tavvdbp7hbGhqs0DbAiDKqtTbir9vbm115bev319q5d9ocfKgjgYIkADswWjJAwP5ROd73wQvGyZdCm8Nq19qOPqkSNhEoXcEtVNTfC4gQFc24iW/bkZO+JJ37/rruC/wixUul0xR/+YM2eLYOEDA7CPcK8nYkJzFvoDyKqRCIxNPT+CSfMOvvs2J//XOH3gzDZgO7OTqAXB2SQtxkIKKSr1q5dAHmFeL+gmeSQO
X52gNi5nNrdLXbi7bflQrSYT2IwHYYmal1dExMTFdGot7TUCIfloSGHWQbUlkHbNDUHrBi35/iOgzhavK1pq+Nxz5EjDm5z8KBwITU1NuQNJSkr8731ltPVZd98M+gHpGuRDssUQioUmgCuPqyoqNi4sXHu3PHPPpMfekjhoBobywCJe+n6aElJXVGRMAF4Lw65OZwklyEIAuIKREuyXLJ3764bb1z5+ecKEJgNGD4mT+yYFIb6+8e6uipgIIDeykp1cFBYK+ZPkKGAdUEowAA3pcJMIK/h0JmqlhbpW9+CmO3x8aQqUjIBgC3moetqURF2JqeqLTBFWa60LOi5qQOI/TrOMc2saaanTVu3YAHQKLhoUQu0VJZLdb0sFLI40gIEYjH19cVMd0dGWMMFQ6CghzM4bqrEsj6urKy/5prJeLzkwAFBVGC37Ns4dHPJjZUYG3OzYlOnSvv2sZ9nhiMUB/vorjOfJXSTJqaJZezz+fZNm1ai6+Pz52NmAXCSwUGs3O/1nnjoUNH4+IuzZ1d8+9vFweDhw4eT7e0mNrakBDupYjaBQN1xxzH2qqtW9T7yiAhlBwcb//SnmSCnnF5T1Zpk0kSAoaq56mpEBWYhCwn7hBqTumE/DcsqmT9/1aJFyQ0bnH37lEgE5ByqhzuKHaJUFDQrRUrhgggznJISERdxSpDMOJ8WoPDNNQmPBxcnbHvvOeece8cdcKEgycWUB00SGuEn9+KLh371qyX/9m/L2V2ffbZBx9V/ZPBBv//0s87i34ePP9669lqAjUBO3KunZzQeD1VUpDdsSLz5ZvXkpMQGrOs5+Aioqq6XOM5exzFmzRJmu3RpHBoxMTGsaQHbrkZEAXzGymFZ4+O7Z806M0+WACLMqzm0FgSb8Fhjgi5TgtNhBktmCVxZXVdXxQhBQb8ciRT19Qnxl5WBzTdqmgfEA66CoksP+8lEQsgVR8bH5TlzJLJP8aFw3+npKXvxRZNTP8T7g5OTOqhLRUXZokX777577xNPZCAIjwdcORmLeY87LhSJmD09ucrKiyjMDk6ffvCnP21ubq6YPx+M8OBXX012dOhVVaGiolQ2O2/DhtqyMveOmDNkxxlyTsJxJifX1MSIJ3PymakfzAYENRxWsR5gL6yfw06E/sBbICfYiM8nyGF5uQKggto4DhQVTJvxQx0fH1u40PP008VglJ9/bv7kJyI0ZTyHEDEJSvebyeTgbbc1bNrkTnJiwoYcECHJMsKPoPT/+WAZKccJyvLf07HOTuPCC7EcOxxWKMHs8hbE7ZwowO0ByzAGBUoF+4a/cpzheHxnJlPp9cK6gDSYHPSncmysFlSxpMSmkAXeImGacRJQSNfLPR6LoTUYNL76avjAgeLVq+19++BInfJyptwS5dwcgi6vpkWef37ilFOiiEAkqRYeS/CGOJslollBXcE9/pcPjDKcz93kKX4Szty6/36O/sFeJIqohYjJE1ECgFMEFHw7lL+FdU0mk1uamlbfdVcsFOru6PAFAl7gh2kebm9v27WraffuCKEdrPqVlSsrTjkFCmanUnXbty8EtgOKVbU0lRr+7DNn9WoTZyKCpYSDm6ZHlELhG1h+5OhR/YYbzMsugxN29u+3cQnoOi6Bvxwbwx2lxkb59ts9jY3urn7yifn444KBlZbifBiasmIFOKZz4AACNbu5WSgzbIrs9FgVie1IZBdOOMH1e9BY3mqv12uaB2U59dRTyxcv/r/lOuk4I7/4Rd2WLdjVRCbTf++98y65hL/qGRoaBt/88kvYoZ1IvH/OOWf++79nP/tMuvZaQLdEiV64Ys68yJRSFd6IQn8Old2MfyEBBG3KZodmzvQ9/3wZItbWVuOKK4AUCrErcW0eDmRKFUqUSHCzGlxgoDQzB4IineBSFo7+OcMCNTCMkKYVU/bQaW42vvMd/VvfMs87L3PTTdnJyYgsT7nkEiMUghUUBQIznnwy9+677AqnVlTUP/TQeH29kkpZs
AXskiR5Z8zADkssZiwMi+TYm1KzNrloYdjQAk6AcrTAjgRWVlSU7uvrRUxGAZaA93AY/gaeT8RAsZigorAFXI5dZQrJiVcSAaevZfbDEB/vtUtuOWgEj4Vnsu0kVAuY3dpqf/651NZm9/RYf/xjy+9/L3A+FlNIZwBgCKqkW2/Vb7vN/uQTmFBZNBqZO9cCGzdNdXISJEwFf8zDNQSKgFGQJNgnthDRMkCFyijMw9wEPWUmxL8IhnV9EhtISqFStUBsCYUEEhUxZfa0nF0gSuNwNh8qQF6Dq0i8QAqOyUdzOa+QlFZxJ9Ir7IAPQ+N+ppkKhztHRo6n0pfMaYpsVsNuYBKvvWZu22ZMmwbuocEUsUjK3QMeMJQMLOScA1dYqPjqOkLeSSqUCv4A1OFAlRGOYg+9uDgGzMtXT8RSsU8Yjes+zMnyjocrrG5QBIlAFpwJpmys5u5qnsRyhh07HMCNiabZ06Z9HA7rhmEZxoEZM05kCjE6KjE1VZRWy3p/8WKA7KpDh6IdHUZHh0jxOU42nR4MBlXKIkh0V4fqYO4iOf/GWXgchEC5hkxhENefXeaIP4NBT77IaAOloB1YJ3QYCsJlDXalXL6lfLObtMqvnxNggnhw2CBuwCIh08JAHlVNjIwI7Z0xw/u73/FufLuurhrWArb8H//BqUJcdWDRorOfew5xf+dvflP1zDOCq6gqaMDWaHT2RRdhgxxELfDVVEPmormb4uEMZj4NLnOVjFRA5iIwxI0jmcyY1zubUtZusoaTcPlcMrNjLvS6dUnO+/K3hXNIrTQuz8L9OsxLqNwKidqQIn0Fs1k8b94xX79nj7V1q7Vzp0zfArcWKUqFz+eXpMCNN4Lif/7WW4GOjq/mzVt4ww0rCeTt554T1l5U5JZUITuGEC585/OPvL0y7xXmQFYHu8hms8bs2cWkgyIa5awN+1UWIimOW5SlcTgbyRkrsZHsDjFhkeLhJgLWMciPS6SKglChGDtD8zOfesppbZVBuQA5CFayWYUywOJPx6nft0+/4w7rttuCkcipl102dvHF7W1tG2tryyl9aW/bZr3/voAArqRzYZmXyrUCTqBCV0VKMScVnA1H6Y6jg3XV1jKXElIG/gEyKDx0CK5YNVwTABJh/9i8TZE+c4N8qkXLvGC3/wHaz2UXKtIinp62fbve2Kh9+aW1ZYtLG3AmdAmRLVclScfg+jxvvKE3N6v/9E+eNWtKYrGmuXPF7NJpe/t28+GHJcTiHPGza4XOM3Un8HTrspg0a2NRkc37TIYnLN2ywqwCJCaBf4yXnIvmFgyOscgKXDQtZDU49Y1vISCBYUuWHGsWwd9UnhZngCHiCJMVcpXcViCzBnIhCmpJhSIxOY6ra2rkVauU0lIJDgm60NEhc0GETQgLwDZy1ZsU262SE8Zw94lbNyzUk+H2stmB+fNjzz7rR1j/wgvWAw84kYjQBWIybL2FLLcb+gLtqSrKGQXhHbAE5jkieKA5udUAyqE6XBkulAKIeIrcNWdeuHJDkbrLZnA+x9m4DZX8hRuA0RJndiN19hlYMDmzYwVrBhUemf/lrxhyGXUsy25sVKqrRTafIz4GV272ALCBn1DngRAWnwBBUIcNC4V7FACBGjtesdp8Rccme+CCrTgCeMQacAHrTN7MZG5/4FpOvtjPGyjT1slcx4DLYZ+XL9ZxZdQdX3HbiDj5xBlGt9pC0rTZu0D1WludlhaZoE5iG+TMDrWUuDyEO1LYvTOSse4w6yL37na7CDfIlXEuZ0FAWAYLEpeRknDmhTsoRJ2S7800gCkrlefc6fK8WcysXVTFc4WIO2IlRJjcdDFmxj6TTFpLpYCLOiu85hbAuP2DK72cpkIYipjF5tAqrxSCZlEOx3XR+Xoi02fFtexvJPFEeZZVNJ/owso9qZRX132plMqoxqXzAlfhlRPpd3t/uN7LPWgse2a5+ayowq0j3H0EzMd68t1gqmke9Xq/xDncyEBe1+1ZY
5SiCr4my722PYH94JmzLTBHpsqjW+7G/nM/BfXukB+nsqhbrWYZc48QMXVRcVOUfkl6LRJ5OxaLY1ZseHmJSoUuAyrYs1IoVAd1G3BgJsGg8O34l4GaUhCuVpNCis3HnpOkMN2d8+YN/epXI2VlWt5rFHilRIVOKNVANvv6eef1XXopN1OohoGDGncAshkzx8aFVC5nRBRZL7e14xvdVEKlUylWMDWdTqbTfzj99BU33lhaXDzw2mvRJ59UuFqVTmvgzLpupdPQLom0DrLD1osSZiLhKTiYTAbOxmJVJPt0KLIRfQ2YPUJp7Jht+0jRhnK50PLlZ5x66tDgYOLWW/2RiM/rtRXFJA7vYRhLp3dVVW245ZY6TRt5993Q2FhKlicwH0mqDIV8imJREI5gxl0/Y5toamFl4JwLFdq4R8LhRAwk5zhHA4ElV1yxhEJw+/LLs1u3ygik8JXP15dM7mho8Mdi5QMDGN1AjFVWli0rQ3gERfCPjoaGh+1czvR6546PT3Mck2kQkIY3wetNZ7NvVFbmFi8uNgxfe7uezfZXVp528smi8nP22Tt6e3O9vYGBgfldXQ0IARznnVAoUVqa9PkqL7xwGlUkO2+5peWDD6RYLFJfDz1vPnhw+fbtJbBteErYAgImbqYAmopGJAJPh9K8Tj695lo1oY6laZW6rlKoyNlAlQMuny9jmn9ateqUn/8cYfBIPD4xNqZ5PEVR8fGRNWYMY3R0NJvJePz+iYMHh++6K5bNWpQAY7P0ZDLvVlVNfeSRE+fNg17Ek8mcaa4rLuZgEkNd+OMfI2RDEDO6bRuC0AFI/L77Tj3zzLAkhfNZnuXnn4+fAvmFIiSgxlu2CE5GYMQNkQ55R60QRjn5Crrb8EQQLQwvkykyTf13vzOWLBHs+fHHsXgRqchyIpM5/qKL5lRW4nAYgTh+vgljkuT3eEroW/Gpqhpoa7MefdRB+MECxewdpyEWmz5vnkop3mo4f2wIlKWqyh0JBO7jjysCgYp9+wyPp1qWa955x9/d7SxYIK1Y4Y4cj4taNCa0YAHCZq27u+jTTy0MRXrkdvcyvxAx/KJFMsVfBe/HqRYPba9BjXm6omTS6Wx1tWWaSjwehGGbZiAcVi0rs3hx8OKLlSlTHMxyaEgwhJ4e+9Ah4agbG+Vo1F682HPqqe7swUDvvFOmBrECbgvZL13qWbLEicdFum9gAORx5Pvfr/jRj+AR9Isuco4cEedR76hM+Aoyb2ra8BNP1J92mrN/v3nddfbYmNjGykqR8Qb35Ow8JysLvWnkqylrSTGXQ105TB6wno9M81B9va+42FZVMxTyVldrqqpFIqqmGfF4Lp0uHhlZ9be/FX/yiQH2A/FhhYh7uYOYegodqnQYAKGHH55y5pkiSQq6mu+LPqYL2If//m9p5064VDUfMPhefjm3fn0QLhZyhGZy9MM2STkZr2X5/+u/nKYm+fnntc5Os7RUXDs0BOAwoJUcPAKlKGBwb0fLFIkVmWpTrh9WFETCO/3+7muuOfPcc0so4vG6FYr/WUCUpOS998pbtvh4lvBDHMRzBYPQWA8E/NlsfOvW2Lp1PiDk1KkWtpdidO6jZb4Bt9fn9/+tutobDlcPDlaMjPgHB4euvDLr85nAfE2L+3wqgFrXU8GgAWsKBv3pdOXhw0Pnn99jGIPLl2NjVECjJNUNDZ2QTHrZ6XBXMhw++B9TMVF5yDeqiX3mIBEb1dS0ceNGX77iLo2MSORXBaqNjsrl5VCeUDIZGh8fkOU9mmZHo9ifDMQJbVfVMLWkVqjqcbatwEkcOTI8OFhbVaUMDQnkZErDvXPw8YaRzOXePP30k269tSoWGxkc7D58OIG9UtVASYkXhFyWcTwYDALtgqFQOBzWvF5A/HBvb39zc7SycvmCBUHqgMGwI2Nj8bvvrn3/fYvdLTfQcEc8tXPnVbzQsW/bps+3rLVV2rwZblP+4gvRjzMyolJ7s00eFWuwqquBdkNdX
VtOP33ZD35QU1WF2fj8ACkPdiwxOZnOZDKjo32PPjodNqaqKUBRVZW9e7fDSMFPFpDK4QeuaM26dQumivaQ8oYGbrr7f30wme7uymBw/rp1boMTfBgx3Gg4bIGS8wMLhYwH54AozNbYJDh16Pawwn+Cdfzyl5ACHE8SpquqI4D7bDZKA2XT6ZqWlqDPNxAMnn7bbfNrav5uPlVhchkzZmTXrbM/+cSvaRblAx1ACxf1/H4GDpEGDwSiPl/42Wetr7+GmgyEwwlFmd7fL8diImiDuvX3Cw/ywx96jjtOmP+OHdavf409ELAUCilAGY+nq6iooaFB4PCnnzpffy2VlbmRFi2KA08R8EUiGqe8Bd/iBwxI0UX/VzA4nkq9snJleVOTMT7unzYtUlraMTSUhIWUl0/s2rVo587GcNh+910Ebg5C3+ZmofYAFYizslKZPx/eQt28GfQL3CgxMSHuMjAg3B53k3MVmut4wAgAz29/K01M/HXNmrm33TZx3XWlHR0GxieYBWHsPXAg8OqrFaWl1tatTnu7VFIiUa+lNjDwvs/X+cMfxkABOztFKzHWwlEthRAiDUo1BhE5gClwFlcgGAyP66aUf4CiHC4tXXnPPU1U9fl7nVq+3NizB+TQ8/DDomPZstRCao7gEUegyQiqATbYKOCv6HdqbeUuAbe+g/vlch5FyUHZ4LRVdQJotGTJggUL+q64ovf++6FEIUXxEQTmRkfTQ0NYsCjuwWtQZdhQlMF0+utTTvn+1VeD/0488khWUfy6HvX7Teo7dCgUc7sPqRCrcSO9AC0uDkBslNYyvN7ZmYxoJYWc2tqcvj65oUFs0eio0tio7tqFKULVD3s8H9TXT0Fg3dtraBqYUy4cjhpGdTo9FaNjbUAax0FMI+rxY2PwMQ63fZPDgH/cqWmHV60qCwa9HR0DuVzTmjWYXuTyy/8WDCZ6epS+vsojR5YfPRpQVWicwNexMUDd24oyPG8eBJorLz9x0yaMkz733B2joxr8Uy435ZNPlnR2yoUUD5wu17REVnj5cs4bHHvGgqAcugHuobC7yoiP5fEEdB1DY+tUSqb16PofN2y44MYbAZK9vb3BaFTPZHyBQDaRGGptVd56a+mOHcGiIqCINH26Z2ICDpz7pdwEoq4POM6OG264+Oqr4fnGaXKx/1kNBBofTadDV17p2bfv6GOPHQfHdtllQ4bx8T33nHTxxUFg1T8CteFEwv/d7/r7+jjL7QaCFLpqLpRBjQs5awISLAoM9q2ysiKvN2Pb2WnT/EVFvqGhbCplRKOg8k3t7QOx2NnXXtsAc5KkCiAKP+GFT3W1NGvW5Nlnj91xR/CNNzzh8JednWWmWYOAibMwlE6Cgw1ns2doGteBY4OD0vAwLpQoteDs3Alo8FRU1Pf3m0NDIDMDL76IvUAEA5Jx1vbt8PBOWZkze7bMfa0gmAcPSjNn4szyDz80AHWU+udeFrG3lIoSjWlu4opCCplSJLZPtHL+Phar+c1vZlRXIwaKYcOpsyxHQp3o7i695JJ2zPXVV+tqaoBY5n33OfBYMK1Ewli/3vMv/wIal92/X73ssmHTfONHPzrPcaqeflpUjwtkG+jFrnLaNAGWnZ0AlZ5zzql8+OFAV5e+YYOoD5O7hiNMZbN/uuqqExcvrvzJT1Qx76ydzaqy3FdTo7z8cm1NjXHDDfb27SKLKBoyJ9zkFMfSnC2kjAUlNIiICGVjHCcGYqVSM0OhpoaGcp+vRtdB8WAJoXi8tK0tkExWDQyINuV0OkVFI7u9He7H6e+3Ozqk3t7UM88c/vBDQdFiMU8w2FFcfOrFF9dccEHW41EKCSAmt7gWU2luhshEo0QkEtq9O9PWJh05Iko5sZgSiXgRD1jWAUVpWLq0fu3aRHk55GLRozFqIBDu69OhGgjI9u7VwFJGR4Un42ZxzmPmiQYroMaNUBLXOOhhC0HBUin4g+NaWrA/UDALy4DzKC3FCm2gPEGlBoDVN
JNoqgVKRM95cCvZ19ksiMc8SiMjLC2NRIogzfp6u6lJ+vhjbmAudH4KPgC27PF4s9nJyckQFOPqqxEk+oAxhpGy7f2O015UlFy/fiNllEf+9V/f/e1vrWhUAyimUqHu7vArrySKigActZpWDeLElINbkzh5yFQcaxTEg5+N4JwWVbfcDDgIuuN0ffHFUVWtxz5kMs7gIHQhijAV4iBKHLCsMS64TZ++o7FxQtex7Qm/P7Ny5aXUlShaCTOZdCDgjI/XlJf76+qc3bvl/HNbDvUCup0Htv16JDJyxhnlmpbq64MOVx89OqOz880TTph66aVLFi2aCd9Opj5n7dqGtWuZXuWE8k7E29sRcpvr13/V1ja8efP83l5+cOBYYanwkKfo8cg3UotYh5vqqSnMa5p7ZPmjjRuPP+20vS0tMCGV2pjN3t7wwYNLmptjgDXD4IKbv7r6uBdekCndB/OvpMIXGLj10EOwfDD+qooKzo1y6wxnPwVeUDZT3Au3vuOOa844AxaYoQdERgxj5LrrztywYRb4Y6HjjnonA52dUnExNtAbjRZFo7XUxCA+S5cOrVmTvfxy38SEzFnXfGAoeDG0XOwwVWtlLrhwWyKhmW5ZbWvXXnvnnQIxC6E2fYay2e4XXww/+aQYgWEZYQr3iLqRVMp+7TXrnXfA8gCViJ99pF0KMUq3SkAQYlMHDNQ+WlIybdkyJd+DKg0MVH/4YQUY1S9/aeBGljXZ1OT/2c9ARazbb7cATmVlQGPQT6m8XJ47V7ngAplyBhW9vToljN0HR7hXlos7FEJpblcjd4fn2/AgeDOXq6ut9XPQumOHDRACZ8ASN22qKCuLXX116r33Ih9/HOrvl+hpF+vBB+3OTgCGCMQw9IEDwruQ9wun0xnQLEBjS4voLaDOMFf2lPREWDcd0f/115vz5gGrna4uZ2JCGRzUqG9AHR3FrAaBTJs2zZk1K9vT4we3Q+AFjevuFuj39tvOSy/Zc+aIW+/d6+bMIDV60EDsH2bFHSCii4dvzPUefoiNDNiDaUEnKfNiPPssXKJMyc3W0dHG++4TzyTNm6ft3Vv3+us6YsN9+6xXXhGFFcTP3LkJhANPzmSgojlsOKKl6mozmVQoe+g+zEkuUDy0geAOtoBY6oMPxPMIEI3Xm/T7WxBLhEJZbJ3fPxoOn0PBee9VV+15/nkRNmD2huGbnIwMDFQODcW6uqCoqt9fgpBNUUyuobmPo2kuAxEqzSm1QsKFq2eUeTa5tMU5Z2pn8gClDh1K6Hqp1wuXYHu9QfD1n/5UdAHDqDBpUl2RpqXkqENo2VFVtYwyW3ZDg/L551L+EXJREKL+qLdAfZcuLR4bA1IgzprX359W1b+sWTN93bqGuXNLy8o8Xm9AUaI0OEy6Zt06jTodTWpkHo3HRzo6jhw8CJeApWqtrYs++miqYVjcoMNPhHKFSeSuAVG8VG4WomhJsFxcnMvxDsM9eqnzaSyVOlRZeQIX0CYmHH7QEfpDGQwI7iPTnAyFjh8bq4QQYZ8ez2gikV64sIY6zrT6eveZcS47pdOaLG+bPn36gw82zZqVhA+jJ7nGdu+OP/XU9x54oJx1gSuVhc5iuInubs4i+ELiUxEMSoilTzqJT8hKUtc77+g//rEGyODOTW4FF2nHhOaWiLiNmZyVUACs2ecLjI3lLMurql1r1+7p7YXixSsrT7r++gCxFgf0gBqHRCwNuQSDnbre8s//vOSMMw7v3v3pq682trerk5N/WrDg9I0b3Q7XRMLi56dJg7DDRi5Xv2DBKmoc9fHy4vHK/fsbMfiVVxogrZOTzuSkmkgMf/e7ZddcA1Oy33zTvOceUXOGBpWWKgjmYjG5pEQ+9VSF4BrrmT1/fo7L5Vy+445hGEskkg8eCi3H/AQXSIXjfFBXN2fz5pjfn4V3NQzQxoiiMChbP/+5uXWrzGUkcqqabXdZlrF58yyaPaK2zn372pubV553XkM+52pceqlz6BBXl
R3KocLsTTCNSy9VSkpEUA1UHx5GQCbTA3nu42SapmYybzc1nfrCCyEY5hNPyMBtfj4ZKknVDBH0QolWrlThUDIZ+y9/EfuRfzzcbbuj/gOB0txQe+wxU24iNk0/jJ+0F3GvHwfhcrEte/daW7bYBw+K7jB+4pFcPEIohITx7dudWbOgdZXBYOXq1ctXrz72ZNof/mC3tMj5J0GZDAG0ATz2U0/ZHEVxJx6VbETmmDuXgCa27W1oCHLDc1WVxadxIYrqQW6b9EcfWTt3ii2NRMTMuVILJserJbyk/i9+NJPJVr5UD0CqGRrKDQ1JU6ZYjz1mvfaaG1FR7sJ9vQBX9KglCd+qoVDs2WczgYD/8suVwtPP+IyPiz6Yp59mL+BQXwdXA/hpMYWSW25xnPWQatyiSkZVAVwYA5ciQiIj0M0/S89P6bLExQ87QoxAFV9RkecywzfeB5DvAOCEC9dy+clTXJRMmscf762osN97jxNAMmWtuaGOHxg59gwfh9rgEPhz9my1rg58QDhkRG0I9Pr6uNPDLbtTUdt9tIY4j0yVUYcYKxMBt0uOpivaAiMRzwMPYB/N228XqXaySbfdjBP02IxCJxqxGiGyfPufTKUWCg+hddz8xM/wcLcKV0y51YkKsFxfFVMsvDGBi0/8PDA/Ts6QiK+Af9xHwu0FhcesOJnIlxRaSQsNEUzx+Som9tz1wQ8CYT0ENFyXLTy64VJjtix6St19IQM/jFt4eQRXV0QHAL2vg9kmjyKTSNzOGq5T518acSw/BOullHXh7RFcHHOr5Nz8y+2D1MjCDY+FV2i47wMBMwUhYc3Kl/LcJ9ipE5UVSuwYp4RozTIXrrmJlkO0QuE+FCok94+9xIM7ABmqvF6NOx9l6i4v9Mm7/LPwYCQ1P4mpUDrbfV0Iv0Cm0OrI9+BIKN+Gw+9AEfeDqnOPGHdZ8vsq+EUEvG/s/1lkhVIGV/Z427EeFl/hBN4q1kTuNuFVsQKS0gk/lH/HCI5AvgqHxa5caU+4V921TJ4W3UCUyAvNcsyiaK84mX7s7SmYN+0JFxYKFbBj76UpvJSAuxLyD0WIjn2WciF7zmUh1nZq9nBZA8ZnRpiPhFz/wsE86RFPr9DI6lDnFWaruH1HuD3GpWfg+H0CrvYWGrXzXYPCVrlNstCwQFjFzS7ce+42b3AHOrdFFHqTOK3D5phvk/xm+xM35rlPYHF8zi18kAhDKaNjoWWBxUp/2twfBDlywyoDGNf36E0DtGBqBHD4LTH5J+Tcahj2il4OweIoPHMv5V/tw76EH993Ci25NILMas9NhPmUKOuCm/HHMihn6L4MgIsD4AwsbtwCk8m/u4UVjV9KxJ0eNsdbDBOEl06+yZ+vsmHPsERaKquYRE/EuVAsF/o8C8+A8PtaMJXCe1kKL77R8un7wmMM3JhFyOzQGtz+Ku7k5do6M9t8R4uL21Rt4WbJgmVKBUsu9M4zwufTyYWHzdhT8FuuZH5nEDec8ZYUmjgga36RRDb7fwQYAOkxP+Fzk1pGAAAAAElFTkSuQmCC')))
#print r
# test remind:
#r = c.remind.unread_count.get()
#print r
|
michaelliao/sinaweibopy
|
snspy.py
|
_encode_params
|
python
|
def _encode_params(**kw):
'''
Do url-encode parameters
>>> _encode_params(a=1, b='R&D')
'a=1&b=R%26D'
>>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
'''
def _encode(L, k, v):
if isinstance(v, unicode):
L.append('%s=%s' % (k, urllib.quote(v.encode('utf-8'))))
elif isinstance(v, str):
L.append('%s=%s' % (k, urllib.quote(v)))
elif isinstance(v, collections.Iterable):
for x in v:
_encode(L, k, x)
else:
L.append('%s=%s' % (k, urllib.quote(str(v))))
args = []
for k, v in kw.iteritems():
_encode(args, k, v)
return '&'.join(args)
|
Do url-encode parameters
>>> _encode_params(a=1, b='R&D')
'a=1&b=R%26D'
>>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L90-L112
|
[
"def _encode(L, k, v):\n if isinstance(v, unicode):\n L.append('%s=%s' % (k, urllib.quote(v.encode('utf-8'))))\n elif isinstance(v, str):\n L.append('%s=%s' % (k, urllib.quote(v)))\n elif isinstance(v, collections.Iterable):\n for x in v:\n _encode(L, k, x)\n else:\n L.append('%s=%s' % (k, urllib.quote(str(v))))\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.0.0'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for SNS API using OAuth 2. Require Python 2.6/2.7.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import urlparse
import gzip
import logging
import mimetypes
import collections
class JsonDict(dict):
    '''
    General json object that allows attributes to be bound to and also behaves like a dict.
    >>> jd = JsonDict(a=1, b='test')
    >>> jd.a
    1
    >>> jd.b
    'test'
    >>> jd['b']
    'test'
    >>> jd.c
    Traceback (most recent call last):
    ...
    AttributeError: 'JsonDict' object has no attribute 'c'
    >>> jd['c']
    Traceback (most recent call last):
    ...
    KeyError: 'c'
    '''
    def __getattr__(self, attr):
        # attribute reads map onto dict keys; unknown keys surface as
        # AttributeError so hasattr()/getattr() behave conventionally
        if attr not in self:
            raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)
        return self[attr]
    def __setattr__(self, attr, value):
        # attribute writes are stored as dict items
        self[attr] = value
class APIError(StandardError):
    '''
    Raised when the API returns a JSON message indicating failure; carries
    the server-reported error code, message and request description.
    '''
    def __init__(self, error_code, error, request):
        self.error_code = error_code  # platform-specific error code
        self.error = error            # human-readable error message
        self.request = request        # description of the failing request
        StandardError.__init__(self, error)
    def __str__(self):
        # stable textual form used when logging/printing the exception
        return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
def _parse_json(s):
    '''
    Parse json string into JsonDict.
    >>> r = _parse_json(r'{"name":"Michael","score":95}')
    >>> r.name
    u'Michael'
    >>> r['score']
    95
    '''
    def _to_jsondict(pairs):
        # every decoded JSON object becomes a JsonDict for attribute access
        return JsonDict(pairs.iteritems())
    return json.loads(s, object_hook=_to_jsondict)
def _encode_multipart(**kw):
    ' build a multipart/form-data body with randomly generated boundary '
    boundary = '----------%s' % hex(int(time.time() * 1000))
    parts = []
    add = parts.append
    for field, value in kw.iteritems():
        add('--%s' % boundary)
        if hasattr(value, 'read'):
            # file-like object: embed its bytes with a guessed MIME type
            payload = value.read()
            add('Content-Disposition: form-data; name="%s"; filename="hidden"' % field)
            add('Content-Length: %d' % len(payload))
            add('Content-Type: %s\r\n' % _guess_content_type(getattr(value, 'name', '')))
            add(payload)
        else:
            # ordinary form field; unicode is sent as UTF-8
            add('Content-Disposition: form-data; name="%s"\r\n' % field)
            add(value.encode('utf-8') if isinstance(value, unicode) else value)
    add('--%s--\r\n' % boundary)
    return '\r\n'.join(parts), boundary
def _guess_content_type(url):
'''
Guess content type by url.
>>> _guess_content_type('http://test/A.HTML')
'text/html'
>>> _guess_content_type('http://test/a.jpg')
'image/jpeg'
>>> _guess_content_type('/path.txt/aaa')
'application/octet-stream'
'''
OCTET_STREAM = 'application/octet-stream'
n = url.rfind('.')
if n == -1:
return OCTET_STREAM
return mimetypes.types_map.get(url[n:].lower(), OCTET_STREAM)
# HTTP method markers consumed by _http(); 'UPLOAD' means multipart POST.
_HTTP_GET = 'GET'
_HTTP_POST = 'POST'
_HTTP_UPLOAD = 'UPLOAD'
def _read_http_body(http_obj):
using_gzip = http_obj.headers.get('Content-Encoding', '') == 'gzip'
body = http_obj.read()
if using_gzip:
gzipper = gzip.GzipFile(fileobj=StringIO(body))
fcontent = gzipper.read()
gzipper.close()
return fcontent
return body
def _http(method, url, headers=None, **kw):
    '''
    Send an HTTP request and return the (decompressed) response body.

    method  -- _HTTP_GET, _HTTP_POST or _HTTP_UPLOAD (multipart POST).
    url     -- target URL without query string.
    headers -- optional dict of extra request headers.
    kw      -- request parameters; file-like values only for UPLOAD mode.
    '''
    boundary = None
    # compare against the module constants throughout (the old code mixed
    # bare string literals with the _HTTP_* constants)
    if method == _HTTP_UPLOAD:
        params, boundary = _encode_multipart(**kw)
    else:
        params = _encode_params(**kw)
    # GET carries parameters in the URL; POST/UPLOAD carry them in the body
    http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url
    http_body = None if method == _HTTP_GET else params
    # bug fix: this traced every request at ERROR level; DEBUG is correct
    logging.debug('%s: %s' % (method, http_url))
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Accept-Encoding', 'gzip')
    if headers:
        for k, v in headers.iteritems():
            req.add_header(k, v)
    if boundary:
        req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    # the old `try: ... finally: pass` was a no-op and has been removed
    resp = urllib2.urlopen(req, timeout=5)
    return _read_http_body(resp)
class SNSMixin(object):
    '''
    Base class for platform-specific OAuth2 mixins: stores the client
    credentials and declares the interface subclasses must implement.
    '''
    def __init__(self, app_key, app_secret, redirect_uri):
        self._client_id = app_key          # OAuth2 client id (app key)
        self._client_secret = app_secret   # OAuth2 client secret
        self._redirect_uri = redirect_uri  # default OAuth2 redirect URI
    def _prepare_api(self, method, path, access_token, **kw):
        '''Return (method, url, headers, params) for an API call.'''
        raise StandardError('Subclass must implement \'_prepare_api\' method.')
    def on_http_error(self, e):
        '''
        Turn an HTTP error carrying a JSON failure payload into an
        APIError; re-raise the original error otherwise.
        '''
        try:
            r = _parse_json(_read_http_body(e))
        except Exception:
            # bug fix: the bare `except:` here also swallowed SystemExit
            # and KeyboardInterrupt; only parse failures should be ignored
            r = None
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        raise e
class SinaWeiboMixin(SNSMixin):
    '''Sina Weibo flavour of the OAuth2 flow and API URL layout.'''
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        Return the authorization URL that the user should be redirected to.
        redirect_uri now defaults to '' (falls back to the configured URI),
        consistent with the QQMixin signature; backward compatible.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://api.weibo.com/oauth2/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                        response_type=response_type,
                        redirect_uri=redirect, **kw)
    def _prepare_api(self, method, path, access_token, **kw):
        '''Return (method, url, headers, params) for a Weibo API call.'''
        headers = None
        if access_token:
            headers = {'Authorization': 'OAuth2 %s' % access_token}
        if '/remind/' in path:
            # the remind API lives on a dedicated host:
            return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
        if method == 'POST' and 'pic' in kw:
            # a 'pic' parameter forces multipart upload mode:
            return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
        return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        The earlier of expires_in/remind_in is used as the expiry.
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, code=code, grant_type='authorization_code')
        r = _parse_json(resp_text)
        current = int(time.time())
        expires = r.expires_in + current
        remind_in = r.get('remind_in', None)
        if remind_in:
            rtime = int(remind_in) + current
            if rtime < expires:
                expires = rtime
        return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))
    def parse_signed_request(self, signed_request):
        '''
        parse signed request when using in-site app.
        Returns:
            dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
            or None if parse failed.
        '''
        def _b64_normalize(s):
            appendix = '=' * (4 - len(s) % 4)
            return s.replace('-', '+').replace('_', '/') + appendix
        sr = str(signed_request)
        logging.info('parse signed request: %s' % sr)
        enc_sig, enc_payload = sr.split('.', 1)
        sig = base64.b64decode(_b64_normalize(enc_sig))
        data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
        if data['algorithm'] != u'HMAC-SHA256':
            return None
        # bug fix: the secret is stored as self._client_secret (set by
        # SNSMixin.__init__); the old code read the non-existent
        # self.client_secret and raised AttributeError.
        expected_sig = hmac.new(self._client_secret, enc_payload, hashlib.sha256).digest()
        if expected_sig == sig:
            data.user_id = data.uid = data.get('user_id', None)
            data.access_token = data.get('oauth_token', None)
            expires = data.get('expires', None)
            if expires:
                data.expires = data.expires_in = time.time() + expires
            return data
        return None
class QQMixin(SNSMixin):
    """QQ (graph.qq.com) provider mixin.

    Unlike weibo, QQ's OAuth2 token endpoints respond with
    x-www-form-urlencoded bodies, hence the _qs2dict helper.
    """
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                        response_type=response_type,
                        redirect_uri=redirect, **kw)
    def _prepare_api(self, method, path, access_token, **kw):
        # QQ passes the token and consumer key as ordinary request
        # parameters instead of an Authorization header.
        kw['access_token'] = access_token
        kw['oauth_consumer_key'] = self._client_id
        return method, 'https://graph.qq.com/%s' % path, None, kw
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, code=code, grant_type='authorization_code')
        return self._parse_access_token(resp_text)
    def refresh_access_token(self, refresh_token, redirect_uri=None):
        '''
        Refresh access token.
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                refresh_token=refresh_token,
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, grant_type='refresh_token')
        return self._parse_access_token(resp_text)
    # FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
    def _parse_access_token(self, resp_text):
        ' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
        r = self._qs2dict(resp_text)
        access_token = r.pop('access_token')
        # convert relative expires_in to an absolute unix timestamp:
        expires = time.time() + float(r.pop('expires_in'))
        return JsonDict(access_token=access_token, expires=expires, **r)
    def _qs2dict(self, text):
        ' parse a query string, keeping only the first value per key '
        qs = urlparse.parse_qs(text)
        return dict(((k, v[0]) for k, v in qs.iteritems()))
    def get_openid(self, access_token):
        ' fetch the openid bound to this access token from graph.z.qq.com '
        resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
        r = self._qs2dict(resp_text)
        return r['openid']
class APIClient(object):
    '''
    API client using synchronized invocation.

    Composes a provider mixin (SinaWeiboMixin / QQMixin) with token
    state. Unknown attribute access builds API paths dynamically via
    _Callable/_Executable, e.g. client.statuses.home_timeline.get(count=10).
    '''
    def __init__(self, mixin, app_key, app_secret, redirect_uri='', access_token='', expires=0.0):
        # mixin is a class; instantiate it with the app credentials:
        self._mixin = mixin(app_key, app_secret, redirect_uri)
        self._access_token = str(access_token)
        self._expires = expires
    def set_access_token(self, access_token, expires):
        self._access_token = str(access_token)
        self._expires = float(expires)
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        return self._mixin.get_authorize_url(redirect_uri or self._mixin._redirect_uri, **kw)
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict:
        {
            "access_token": "your-access-token",
            "expires": 12345678, # represented using standard unix-epoch-time
            "uid": 1234 # other fields
        }
        '''
        r = self._mixin.request_access_token(code, redirect_uri)
        # remember the token for subsequent call_api() invocations:
        self._access_token = r.access_token
        return r
    def refresh_token(self, refresh_token):
        # NOTE(review): looks broken — APIClient defines no auth_url,
        # client_id, client_secret or _parse_access_token of its own;
        # via __getattr__ below, self.auth_url resolves to a _Callable
        # object, so the URL built here is nonsense. Probably meant to
        # delegate to the mixin — confirm before relying on this method.
        req_str = '%s%s' % (self.auth_url, 'access_token')
        r = _http('POST', req_str,
                  client_id=self.client_id,
                  client_secret=self.client_secret,
                  refresh_token=refresh_token,
                  grant_type='refresh_token')
        return self._parse_access_token(r)
    def is_expires(self):
        # NOTE(review): the stored attributes are _access_token/_expires;
        # self.access_token and self.expires fall through __getattr__ and
        # return (always truthy) _Callable objects, so this check is
        # suspect — verify the intended attribute names.
        return not self.access_token or time.time() > self.expires
    def call_api(self, http_method, http_path, **kw):
        '''
        Prepare and send one API request; return the parsed JSON.

        Raises APIError when the response JSON carries an error_code.
        '''
        method, the_url, headers, params = self._mixin._prepare_api(http_method, http_path, self._access_token, **kw)
        logging.debug('Call API: %s: %s' % (method, the_url))
        try:
            resp = _http(method, the_url, headers, **params)
        except urllib2.HTTPError, e:
            # let the provider translate HTTP errors into APIError:
            return self._mixin.on_http_error(e)
        r = _parse_json(resp)
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        return r
    def __getattr__(self, attr):
        # delegate to the mixin first; otherwise start a dynamic API path:
        if hasattr(self._mixin, attr):
            return getattr(self._mixin, attr)
        return _Callable(self, attr)
class _Executable(object):
def __init__(self, client, method, path):
self._client = client
self._method = method
self._path = path
def __call__(self, **kw):
return self._client.call_api(self._method, self._path, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, client, name):
self._client = client
self._name = name
def __getattr__(self, attr):
if attr == 'get':
return _Executable(self._client, 'GET', self._name)
if attr == 'post':
return _Executable(self._client, 'POST', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._client, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
if __name__ == '__main__':
    # Manual smoke tests; fill in real credentials before uncommenting.
    #import doctest
    #doctest.testmod()
    APP_KEY = '???'  # placeholder: the application key issued by the provider
    APP_SECRET = '???'  # placeholder: the application secret
    access_token = '???'  # placeholder: a previously obtained OAuth2 token
    expires = 1393739173.5
    #c = APIClient(QQMixin, APP_KEY, APP_SECRET, 'http://www.liaoxuefeng.com/auth/callback', access_token, expires)
    #print c.get_openid(access_token)
    #r = c.user.get_user_info.get(openid=openid)
    # test get:
    #r = c.statuses.home_timeline.get(count=10)
    #print r
    # test post:
    #r = c.statuses.update.post(status=u'测试http post')
    #print r
    # test upload:
#r = c.statuses.upload.post(status=u'测试upload pic', pic=StringIO(base64.b64decode('iVBORw0KGgoAAAANSUhEUgAAAFAAAABQCAIAAAABc2X6AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAALPBJREFUeNqEfAmYVOWV9t1qr67qpXql6W6gAdmVZgfFFXEnGjXBKGPUmDijj8vEuCVqYsaMPnGNjtskOo8LiCbRRBExSDKKiiIq0DR023s3vVVvtd/1f79zbhVO/sz/19MPdN+697vfd75z3vOe5V45u2KFZNuSosi5nOT1SrouGYbk8Uiq6gQC+BPH8YtsmjhHfGtZ+NbxenHE8XjEVaoqmaas6/hT0jQHf+JMHMGw9JWED67CgPSVjN8VxZFl2TAwDr4Sg8uyOM228R++EjPBNBxHnOA4+EaMJv6TxUHcF5fgh77CTfGPQ5eLC2lkcRxn4iuMiani1vjdMBRxP8tSMN1QyL3A55PpbDmdlnGNzyeuxBrwwbe4El9hRFqVGC6bxWyEdGi1Mi1exrcYDQLC+RAEBsE8cByrxSzpRrhESaXE1DEViMPnc/x+sSj8sBwxGk1dpsWI2ePygoBwC/zgND5HUZRcTmYx0TrdO7K88C8JXcNBiYWHDwQP4eECHMS42IFQSOb9z2SwPLF4zJ7W6c4Di8QRWRZ7jtFxHOPiIL7FUJmMWAaLD5MjGbMeyVghJoGDuJy2DpeLcfAnzsEEcBf8jgt5rpAsjrPK4E+6o3uQhCvmBnlB3Hwj2nbWCPGD8UkjSAPxwzqD0bEw0mcxBEbExYYhp1JilrxRuFjThALTOa5S4BxMggbBjYUV4H6YE/7lGeOHF4ARsDB88C9Mg/cnPwc2LneikAj+py0VF/Iy+O7Yf1yO33EVzuS74Ka4tSJ0VgxApuSQyFwtowkr7nZDGbJZh7/je2MZJEJsslNUBGUT68cmk9U5pA5sjQUR4Bcb36bTWIaSSGBMVg2HrMAdGUdwR5qB2BysnzUfIuPVYvN5nZgSjmMxLGX+kOkKrQkGZVYEljt9xPkYHydDNxkL+CtIljECNixugyuxBllWsL34Ipt1zQBanUzCkoW2YDjMyTSV0VEX3khPXGvBPNjkfD6vBoUWuOJuC0+lYLrYB2wdxIezYMY4wurHukpw5d6OJirlcVHNZj0AC0iWrFSsgUGHAYUVmJbEB4+pNE0PC5FIHApLnaFInMTQB9vA4iGFcBjXeHRdy2TMbFYjDHN4W2DwZLcMsFiAYpp2IvF5KtWRyWDlEi0Sx2W2TMJqcT62iFRDJvx04YcBgj9QK8IwiffKcVRFycryYV1PY2QWDYsPy8NN2ZuwkuNesGRWfl4UCx3yxWmyrLnGAIHhB9JimyFFFYaqKFhkh2XtmDkzUF8/59Ch43t7GUvcW7KzwcohfkX5fVmZ8rOf1Xg84Ycfjg0MWDgOqWHSsC5gPuMCBABpYt9wI+wn9hDX0hqE4mJAlg5hIW4BA5vIZl9avLjirLOOdnU1bd0akGWbUZC8hkyKiam6fo5MVxwk2xS/E2KzMDXXBlj2vC3kaYTWAaINw0yndy1ZcuZjj02NRgePHk1fcUUoHreKizGizB6P9FlxnOFcLvid72y44AKMm2hpsZ55BgriEDyydblGhbuw5fO/hM9iWxiZyXmw8Qv/GQppivJRKLTm7rtPaGgAHKVaW6UPPpCCQd46eXLS1QUWPfsXfOX3y2TJvIWuMgrQwqlEJ8QX7G+YcuB3Qp20ac46+eRp0ShsYkp1tX/2bCmTASaJZWAbgkGvZXlzOSiuR1Vrq6tZkEX4Cj+WBTX1ZjIQu8K6Q5h3jBtgZrBqGA5tgrgvpgjgIN4iZplO29lscMaMOQ0NYvttO6Dr0GFhzMAaIkXCG2HNjNW0Kok8uUOI694lb3euwzzmrFn12bTIlsKyXN3cDE8idCWbVQ8fFvPDh
HB+LvcXRRmvra2amCjN5ToMI8TLkKTOcDiFBet6Ly4MBAABc2QZN7fZ1UP2BAHsHoWd094qRIkESvGHlBynlU9MZHM5P1YFBO3vZ67i+kUiSA6ZpOAUeZRiSxHnY3BmVhgZFirxSTgXaoxfgkHiHTJwX8XsgczB4JS//tV4/nnvwoXSSy+pw8M2Rvf5sHvbwuHEnXcuXbYsPT7e09rq9fuXLF/OUy3btKlzxoyxoaGy6dMD4XA6lxvfsiW8bRu7pWMcEHMiIxRGjlky6kBdyaTZssCfAt3d40ePFjc0OOSB2dbYoTL0sOG4rpugR6yFEZ4FhyPkO4UTFz4N/zJIwPt5PKosq6lUXNctyAZQ7ziT994LRbJNMwhAsm0tl4uYpm/dunVnnSWQoaxs3owZ0jc+RT7fyaed9s0j1uuvm6xyRBgdJoYs6Dx7FfNjZ8FcQFHgjzAfr65DdhIWjIlZllrg5/jHNC0akMksVmFHoxhHwbDM+XEv5t7kRDXmDxCk62CwQmhpLvdGWZm9enWktNQ2DBuKXVuL45rXa6VSmcHBXCoV6+yc295uHD6s1tVJ5E6djg65pkaKRIRMW1udo0fFPY4ckVIp58sv7S++kBmZMAMYHvtP8nNeQkqm8ZbjWMTDvB6PbpoDAELLata0GDktJxr9dMUKfdcuaN9oOFwiyzMmJsp9PtDUVCYTgGu0LIOdHFE9l7GQAQs9gvHnFi2CbGzGNFyQyWi2vXnGjBm//vWy6dOl//2TlKTczTdH33/frq0VwK5p6tdft6xfP+3BB+E2jI0bnb17MT/hjaC3uH006hTILXNSUjZvMNg8Nvbl1Knlth0dHa00jCpsrNe717L2zJ5dvHIltlcpKzv/e9/z0rZPGEZXZ+fk2FhJRQW29+iePXNffvnLVKp/xYq6gYHj9u2rg2IGAg5zCiKeQncYzDweTSKyjmkx/YC8rXh8Vn19E68Wm3P4sJRMCkoEzccWjY9L06bJVVXheDzc34+vpI4ON3iQ5a6BgalMeKJRH9QP1hoIpMDPsG+67ocJ+XymQAgK5jwenLN7cnLvpk3rr7wSujrY07O/s7P/nXfmffBByw9+cOlNN5XnuVrhE/V4Fs6cWfhzYWPj8KFDdmnp1TfdNGGaB957L/7444t6ehxGZgpXZAoE2EVpTJiY2bsuKxKZv3evfsstSjKpdHVh/XBCKuSkaRYUAUbu9do1NfrQ0N/S6cSsWeXJZGxiwjSMIUUZmT8/RPrTfe65n8XjxYnECFRx7lyoIzStaHLyhD17ai3LJgOGWg7r+v4LL/zhrbcS1ZCmV1VJS5emgHznnnt+W1txV5cE9eEAq0AeoZnYA0wVKgndOXiweNu206ZMsZuaSk466cT16/sWLRq56qrynh4zEhG0HMZM8YmwXAHdK1ZwqCHGgmnRL1ByJ5GAxx+Gp/Z6cWhEVaFaUbhlTYNfnZrLvTRnTsntt8+fOzeXTPa3teV0PVBcfPzChVFyfdBXuKUEPKrjVCP2IG6clqSBP/95yl13AQ6wA4DXLxXF+s//bAL+4zM6au/Z47S1OXv22IcPA3Vs6NSUKfCFRizm3H9/CE5+bMy4+Wb700/lfHwvYlIgvKZh8MyTT1YuW4Zv4ps3h3/xC/gXh3bRIafNYY/GvEximgZF55AILKKkZI9tf3reeTXFxWnLKp01y6coR/r6TJ8vqCjGc8/Vrl9/yoknkgsqm11f/3eKB0Us0vWiAh+ig8FcbjpEwz4fik2gWlJaKr7r7tavvdbp7hbGhqs0DbAiDKqtTbir9vbm115bev319q5d9ocfKgjgYIkADswWjJAwP5ROd73wQvGyZdCm8Nq19qOPqkSNhEoXcEtVNTfC4gQFc24iW/bkZO+JJ37/rruC/wixUul0xR/+YM2eLYOEDA7CPcK8nYkJzFvoDyKqRCIxNPT+CSfMOvvs2J//XOH3gzDZgO7OTqAXB2SQtxkIKKSr1q5dAHmFeL+gmeSQO
X52gNi5nNrdLXbi7bflQrSYT2IwHYYmal1dExMTFdGot7TUCIfloSGHWQbUlkHbNDUHrBi35/iOgzhavK1pq+Nxz5EjDm5z8KBwITU1NuQNJSkr8731ltPVZd98M+gHpGuRDssUQioUmgCuPqyoqNi4sXHu3PHPPpMfekjhoBobywCJe+n6aElJXVGRMAF4Lw65OZwklyEIAuIKREuyXLJ3764bb1z5+ecKEJgNGD4mT+yYFIb6+8e6uipgIIDeykp1cFBYK+ZPkKGAdUEowAA3pcJMIK/h0JmqlhbpW9+CmO3x8aQqUjIBgC3moetqURF2JqeqLTBFWa60LOi5qQOI/TrOMc2saaanTVu3YAHQKLhoUQu0VJZLdb0sFLI40gIEYjH19cVMd0dGWMMFQ6CghzM4bqrEsj6urKy/5prJeLzkwAFBVGC37Ns4dHPJjZUYG3OzYlOnSvv2sZ9nhiMUB/vorjOfJXSTJqaJZezz+fZNm1ai6+Pz52NmAXCSwUGs3O/1nnjoUNH4+IuzZ1d8+9vFweDhw4eT7e0mNrakBDupYjaBQN1xxzH2qqtW9T7yiAhlBwcb//SnmSCnnF5T1Zpk0kSAoaq56mpEBWYhCwn7hBqTumE/DcsqmT9/1aJFyQ0bnH37lEgE5ByqhzuKHaJUFDQrRUrhgggznJISERdxSpDMOJ8WoPDNNQmPBxcnbHvvOeece8cdcKEgycWUB00SGuEn9+KLh371qyX/9m/L2V2ffbZBx9V/ZPBBv//0s87i34ePP9669lqAjUBO3KunZzQeD1VUpDdsSLz5ZvXkpMQGrOs5+Aioqq6XOM5exzFmzRJmu3RpHBoxMTGsaQHbrkZEAXzGymFZ4+O7Z806M0+WACLMqzm0FgSb8Fhjgi5TgtNhBktmCVxZXVdXxQhBQb8ciRT19Qnxl5WBzTdqmgfEA66CoksP+8lEQsgVR8bH5TlzJLJP8aFw3+npKXvxRZNTP8T7g5OTOqhLRUXZokX777577xNPZCAIjwdcORmLeY87LhSJmD09ucrKiyjMDk6ffvCnP21ubq6YPx+M8OBXX012dOhVVaGiolQ2O2/DhtqyMveOmDNkxxlyTsJxJifX1MSIJ3PymakfzAYENRxWsR5gL6yfw06E/sBbICfYiM8nyGF5uQKggto4DhQVTJvxQx0fH1u40PP008VglJ9/bv7kJyI0ZTyHEDEJSvebyeTgbbc1bNrkTnJiwoYcECHJMsKPoPT/+WAZKccJyvLf07HOTuPCC7EcOxxWKMHs8hbE7ZwowO0ByzAGBUoF+4a/cpzheHxnJlPp9cK6gDSYHPSncmysFlSxpMSmkAXeImGacRJQSNfLPR6LoTUYNL76avjAgeLVq+19++BInfJyptwS5dwcgi6vpkWef37ilFOiiEAkqRYeS/CGOJslollBXcE9/pcPjDKcz93kKX4Szty6/36O/sFeJIqohYjJE1ECgFMEFHw7lL+FdU0mk1uamlbfdVcsFOru6PAFAl7gh2kebm9v27WraffuCKEdrPqVlSsrTjkFCmanUnXbty8EtgOKVbU0lRr+7DNn9WoTZyKCpYSDm6ZHlELhG1h+5OhR/YYbzMsugxN29u+3cQnoOi6Bvxwbwx2lxkb59ts9jY3urn7yifn444KBlZbifBiasmIFOKZz4AACNbu5WSgzbIrs9FgVie1IZBdOOMH1e9BY3mqv12uaB2U59dRTyxcv/r/lOuk4I7/4Rd2WLdjVRCbTf++98y65hL/qGRoaBt/88kvYoZ1IvH/OOWf++79nP/tMuvZaQLdEiV64Ys68yJRSFd6IQn8Old2MfyEBBG3KZodmzvQ9/3wZItbWVuOKK4AUCrErcW0eDmRKFUqUSHCzGlxgoDQzB4IineBSFo7+OcMCNTCMkKYVU/bQaW42vvMd/VvfMs87L3PTTdnJyYgsT7nkEiMUghUUBQIznnwy9+677AqnVlTUP/TQeH29kkpZs
AXskiR5Z8zADkssZiwMi+TYm1KzNrloYdjQAk6AcrTAjgRWVlSU7uvrRUxGAZaA93AY/gaeT8RAsZigorAFXI5dZQrJiVcSAaevZfbDEB/vtUtuOWgEj4Vnsu0kVAuY3dpqf/651NZm9/RYf/xjy+9/L3A+FlNIZwBgCKqkW2/Vb7vN/uQTmFBZNBqZO9cCGzdNdXISJEwFf8zDNQSKgFGQJNgnthDRMkCFyijMw9wEPWUmxL8IhnV9EhtISqFStUBsCYUEEhUxZfa0nF0gSuNwNh8qQF6Dq0i8QAqOyUdzOa+QlFZxJ9Ir7IAPQ+N+ppkKhztHRo6n0pfMaYpsVsNuYBKvvWZu22ZMmwbuocEUsUjK3QMeMJQMLOScA1dYqPjqOkLeSSqUCv4A1OFAlRGOYg+9uDgGzMtXT8RSsU8Yjes+zMnyjocrrG5QBIlAFpwJpmys5u5qnsRyhh07HMCNiabZ06Z9HA7rhmEZxoEZM05kCjE6KjE1VZRWy3p/8WKA7KpDh6IdHUZHh0jxOU42nR4MBlXKIkh0V4fqYO4iOf/GWXgchEC5hkxhENefXeaIP4NBT77IaAOloB1YJ3QYCsJlDXalXL6lfLObtMqvnxNggnhw2CBuwCIh08JAHlVNjIwI7Z0xw/u73/FufLuurhrWArb8H//BqUJcdWDRorOfew5xf+dvflP1zDOCq6gqaMDWaHT2RRdhgxxELfDVVEPmormb4uEMZj4NLnOVjFRA5iIwxI0jmcyY1zubUtZusoaTcPlcMrNjLvS6dUnO+/K3hXNIrTQuz8L9OsxLqNwKidqQIn0Fs1k8b94xX79nj7V1q7Vzp0zfArcWKUqFz+eXpMCNN4Lif/7WW4GOjq/mzVt4ww0rCeTt554T1l5U5JZUITuGEC585/OPvL0y7xXmQFYHu8hms8bs2cWkgyIa5awN+1UWIimOW5SlcTgbyRkrsZHsDjFhkeLhJgLWMciPS6SKglChGDtD8zOfesppbZVBuQA5CFayWYUywOJPx6nft0+/4w7rttuCkcipl102dvHF7W1tG2tryyl9aW/bZr3/voAArqRzYZmXyrUCTqBCV0VKMScVnA1H6Y6jg3XV1jKXElIG/gEyKDx0CK5YNVwTABJh/9i8TZE+c4N8qkXLvGC3/wHaz2UXKtIinp62fbve2Kh9+aW1ZYtLG3AmdAmRLVclScfg+jxvvKE3N6v/9E+eNWtKYrGmuXPF7NJpe/t28+GHJcTiHPGza4XOM3Un8HTrspg0a2NRkc37TIYnLN2ywqwCJCaBf4yXnIvmFgyOscgKXDQtZDU49Y1vISCBYUuWHGsWwd9UnhZngCHiCJMVcpXcViCzBnIhCmpJhSIxOY6ra2rkVauU0lIJDgm60NEhc0GETQgLwDZy1ZsU262SE8Zw94lbNyzUk+H2stmB+fNjzz7rR1j/wgvWAw84kYjQBWIybL2FLLcb+gLtqSrKGQXhHbAE5jkieKA5udUAyqE6XBkulAKIeIrcNWdeuHJDkbrLZnA+x9m4DZX8hRuA0RJndiN19hlYMDmzYwVrBhUemf/lrxhyGXUsy25sVKqrRTafIz4GV272ALCBn1DngRAWnwBBUIcNC4V7FACBGjtesdp8Rccme+CCrTgCeMQacAHrTN7MZG5/4FpOvtjPGyjT1slcx4DLYZ+XL9ZxZdQdX3HbiDj5xBlGt9pC0rTZu0D1WludlhaZoE5iG+TMDrWUuDyEO1LYvTOSse4w6yL37na7CDfIlXEuZ0FAWAYLEpeRknDmhTsoRJ2S7800gCkrlefc6fK8WcysXVTFc4WIO2IlRJjcdDFmxj6TTFpLpYCLOiu85hbAuP2DK72cpkIYipjF5tAqrxSCZlEOx3XR+Xoi02fFtexvJPFEeZZVNJ/owso9qZRX132plMqoxqXzAlfhlRPpd3t/uN7LPWgse2a5+ayowq0j3H0EzMd68t1gqmke9Xq/xDncyEBe1+1ZY
5SiCr4my722PYH94JmzLTBHpsqjW+7G/nM/BfXukB+nsqhbrWYZc48QMXVRcVOUfkl6LRJ5OxaLY1ZseHmJSoUuAyrYs1IoVAd1G3BgJsGg8O34l4GaUhCuVpNCis3HnpOkMN2d8+YN/epXI2VlWt5rFHilRIVOKNVANvv6eef1XXopN1OohoGDGncAshkzx8aFVC5nRBRZL7e14xvdVEKlUylWMDWdTqbTfzj99BU33lhaXDzw2mvRJ59UuFqVTmvgzLpupdPQLom0DrLD1osSZiLhKTiYTAbOxmJVJPt0KLIRfQ2YPUJp7Jht+0jRhnK50PLlZ5x66tDgYOLWW/2RiM/rtRXFJA7vYRhLp3dVVW245ZY6TRt5993Q2FhKlicwH0mqDIV8imJREI5gxl0/Y5toamFl4JwLFdq4R8LhRAwk5zhHA4ElV1yxhEJw+/LLs1u3ygik8JXP15dM7mho8Mdi5QMDGN1AjFVWli0rQ3gERfCPjoaGh+1czvR6546PT3Mck2kQkIY3wetNZ7NvVFbmFi8uNgxfe7uezfZXVp528smi8nP22Tt6e3O9vYGBgfldXQ0IARznnVAoUVqa9PkqL7xwGlUkO2+5peWDD6RYLFJfDz1vPnhw+fbtJbBteErYAgImbqYAmopGJAJPh9K8Tj695lo1oY6laZW6rlKoyNlAlQMuny9jmn9ateqUn/8cYfBIPD4xNqZ5PEVR8fGRNWYMY3R0NJvJePz+iYMHh++6K5bNWpQAY7P0ZDLvVlVNfeSRE+fNg17Ek8mcaa4rLuZgEkNd+OMfI2RDEDO6bRuC0AFI/L77Tj3zzLAkhfNZnuXnn4+fAvmFIiSgxlu2CE5GYMQNkQ55R60QRjn5Crrb8EQQLQwvkykyTf13vzOWLBHs+fHHsXgRqchyIpM5/qKL5lRW4nAYgTh+vgljkuT3eEroW/Gpqhpoa7MefdRB+MECxewdpyEWmz5vnkop3mo4f2wIlKWqyh0JBO7jjysCgYp9+wyPp1qWa955x9/d7SxYIK1Y4Y4cj4taNCa0YAHCZq27u+jTTy0MRXrkdvcyvxAx/KJFMsVfBe/HqRYPba9BjXm6omTS6Wx1tWWaSjwehGGbZiAcVi0rs3hx8OKLlSlTHMxyaEgwhJ4e+9Ah4agbG+Vo1F682HPqqe7swUDvvFOmBrECbgvZL13qWbLEicdFum9gAORx5Pvfr/jRj+AR9Isuco4cEedR76hM+Aoyb2ra8BNP1J92mrN/v3nddfbYmNjGykqR8Qb35Ow8JysLvWnkqylrSTGXQ105TB6wno9M81B9va+42FZVMxTyVldrqqpFIqqmGfF4Lp0uHhlZ9be/FX/yiQH2A/FhhYh7uYOYegodqnQYAKGHH55y5pkiSQq6mu+LPqYL2If//m9p5064VDUfMPhefjm3fn0QLhZyhGZy9MM2STkZr2X5/+u/nKYm+fnntc5Os7RUXDs0BOAwoJUcPAKlKGBwb0fLFIkVmWpTrh9WFETCO/3+7muuOfPcc0so4vG6FYr/WUCUpOS998pbtvh4lvBDHMRzBYPQWA8E/NlsfOvW2Lp1PiDk1KkWtpdidO6jZb4Bt9fn9/+tutobDlcPDlaMjPgHB4euvDLr85nAfE2L+3wqgFrXU8GgAWsKBv3pdOXhw0Pnn99jGIPLl2NjVECjJNUNDZ2QTHrZ6XBXMhw++B9TMVF5yDeqiX3mIBEb1dS0ceNGX77iLo2MSORXBaqNjsrl5VCeUDIZGh8fkOU9mmZHo9ifDMQJbVfVMLWkVqjqcbatwEkcOTI8OFhbVaUMDQnkZErDvXPw8YaRzOXePP30k269tSoWGxkc7D58OIG9UtVASYkXhFyWcTwYDALtgqFQOBzWvF5A/HBvb39zc7SycvmCBUHqgMGwI2Nj8bvvrn3/fYvdLTfQcEc8tXPnVbzQsW/bps+3rLVV2rwZblP+4gvRjzMyolJ7s00eFWuwqquBdkNdX
VtOP33ZD35QU1WF2fj8ACkPdiwxOZnOZDKjo32PPjodNqaqKUBRVZW9e7fDSMFPFpDK4QeuaM26dQumivaQ8oYGbrr7f30wme7uymBw/rp1boMTfBgx3Gg4bIGS8wMLhYwH54AozNbYJDh16Pawwn+Cdfzyl5ACHE8SpquqI4D7bDZKA2XT6ZqWlqDPNxAMnn7bbfNrav5uPlVhchkzZmTXrbM/+cSvaRblAx1ACxf1/H4GDpEGDwSiPl/42Wetr7+GmgyEwwlFmd7fL8diImiDuvX3Cw/ywx96jjtOmP+OHdavf409ELAUCilAGY+nq6iooaFB4PCnnzpffy2VlbmRFi2KA08R8EUiGqe8Bd/iBwxI0UX/VzA4nkq9snJleVOTMT7unzYtUlraMTSUhIWUl0/s2rVo587GcNh+910Ebg5C3+ZmofYAFYizslKZPx/eQt28GfQL3CgxMSHuMjAg3B53k3MVmut4wAgAz29/K01M/HXNmrm33TZx3XWlHR0GxieYBWHsPXAg8OqrFaWl1tatTnu7VFIiUa+lNjDwvs/X+cMfxkABOztFKzHWwlEthRAiDUo1BhE5gClwFlcgGAyP66aUf4CiHC4tXXnPPU1U9fl7nVq+3NizB+TQ8/DDomPZstRCao7gEUegyQiqATbYKOCv6HdqbeUuAbe+g/vlch5FyUHZ4LRVdQJotGTJggUL+q64ovf++6FEIUXxEQTmRkfTQ0NYsCjuwWtQZdhQlMF0+utTTvn+1VeD/0488khWUfy6HvX7Teo7dCgUc7sPqRCrcSO9AC0uDkBslNYyvN7ZmYxoJYWc2tqcvj65oUFs0eio0tio7tqFKULVD3s8H9TXT0Fg3dtraBqYUy4cjhpGdTo9FaNjbUAax0FMI+rxY2PwMQ63fZPDgH/cqWmHV60qCwa9HR0DuVzTmjWYXuTyy/8WDCZ6epS+vsojR5YfPRpQVWicwNexMUDd24oyPG8eBJorLz9x0yaMkz733B2joxr8Uy435ZNPlnR2yoUUD5wu17REVnj5cs4bHHvGgqAcugHuobC7yoiP5fEEdB1DY+tUSqb16PofN2y44MYbAZK9vb3BaFTPZHyBQDaRGGptVd56a+mOHcGiIqCINH26Z2ICDpz7pdwEoq4POM6OG264+Oqr4fnGaXKx/1kNBBofTadDV17p2bfv6GOPHQfHdtllQ4bx8T33nHTxxUFg1T8CteFEwv/d7/r7+jjL7QaCFLpqLpRBjQs5awISLAoM9q2ysiKvN2Pb2WnT/EVFvqGhbCplRKOg8k3t7QOx2NnXXtsAc5KkCiAKP+GFT3W1NGvW5Nlnj91xR/CNNzzh8JednWWmWYOAibMwlE6Cgw1ns2doGteBY4OD0vAwLpQoteDs3Alo8FRU1Pf3m0NDIDMDL76IvUAEA5Jx1vbt8PBOWZkze7bMfa0gmAcPSjNn4szyDz80AHWU+udeFrG3lIoSjWlu4opCCplSJLZPtHL+Phar+c1vZlRXIwaKYcOpsyxHQp3o7i695JJ2zPXVV+tqaoBY5n33OfBYMK1Ewli/3vMv/wIal92/X73ssmHTfONHPzrPcaqeflpUjwtkG+jFrnLaNAGWnZ0AlZ5zzql8+OFAV5e+YYOoD5O7hiNMZbN/uuqqExcvrvzJT1Qx76ydzaqy3FdTo7z8cm1NjXHDDfb27SKLKBoyJ9zkFMfSnC2kjAUlNIiICGVjHCcGYqVSM0OhpoaGcp+vRtdB8WAJoXi8tK0tkExWDQyINuV0OkVFI7u9He7H6e+3Ozqk3t7UM88c/vBDQdFiMU8w2FFcfOrFF9dccEHW41EKCSAmt7gWU2luhshEo0QkEtq9O9PWJh05Iko5sZgSiXgRD1jWAUVpWLq0fu3aRHk55GLRozFqIBDu69OhGgjI9u7VwFJGR4Un42ZxzmPmiQYroMaNUBLXOOhhC0HBUin4g+NaWrA/UDALy4DzKC3FCm2gPEGlBoDVN
JNoqgVKRM95cCvZ19ksiMc8SiMjLC2NRIogzfp6u6lJ+vhjbmAudH4KPgC27PF4s9nJyckQFOPqqxEk+oAxhpGy7f2O015UlFy/fiNllEf+9V/f/e1vrWhUAyimUqHu7vArrySKigActZpWDeLElINbkzh5yFQcaxTEg5+N4JwWVbfcDDgIuuN0ffHFUVWtxz5kMs7gIHQhijAV4iBKHLCsMS64TZ++o7FxQtex7Qm/P7Ny5aXUlShaCTOZdCDgjI/XlJf76+qc3bvl/HNbDvUCup0Htv16JDJyxhnlmpbq64MOVx89OqOz880TTph66aVLFi2aCd9Opj5n7dqGtWuZXuWE8k7E29sRcpvr13/V1ja8efP83l5+cOBYYanwkKfo8cg3UotYh5vqqSnMa5p7ZPmjjRuPP+20vS0tMCGV2pjN3t7wwYNLmptjgDXD4IKbv7r6uBdekCndB/OvpMIXGLj10EOwfDD+qooKzo1y6wxnPwVeUDZT3Au3vuOOa844AxaYoQdERgxj5LrrztywYRb4Y6HjjnonA52dUnExNtAbjRZFo7XUxCA+S5cOrVmTvfxy38SEzFnXfGAoeDG0XOwwVWtlLrhwWyKhmW5ZbWvXXnvnnQIxC6E2fYay2e4XXww/+aQYgWEZYQr3iLqRVMp+7TXrnXfA8gCViJ99pF0KMUq3SkAQYlMHDNQ+WlIybdkyJd+DKg0MVH/4YQUY1S9/aeBGljXZ1OT/2c9ARazbb7cATmVlQGPQT6m8XJ47V7ngAplyBhW9vToljN0HR7hXlos7FEJpblcjd4fn2/AgeDOXq6ut9XPQumOHDRACZ8ASN22qKCuLXX116r33Ih9/HOrvl+hpF+vBB+3OTgCGCMQw9IEDwruQ9wun0xnQLEBjS4voLaDOMFf2lPREWDcd0f/115vz5gGrna4uZ2JCGRzUqG9AHR3FrAaBTJs2zZk1K9vT4we3Q+AFjevuFuj39tvOSy/Zc+aIW+/d6+bMIDV60EDsH2bFHSCii4dvzPUefoiNDNiDaUEnKfNiPPssXKJMyc3W0dHG++4TzyTNm6ft3Vv3+us6YsN9+6xXXhGFFcTP3LkJhANPzmSgojlsOKKl6mozmVQoe+g+zEkuUDy0geAOtoBY6oMPxPMIEI3Xm/T7WxBLhEJZbJ3fPxoOn0PBee9VV+15/nkRNmD2huGbnIwMDFQODcW6uqCoqt9fgpBNUUyuobmPo2kuAxEqzSm1QsKFq2eUeTa5tMU5Z2pn8gClDh1K6Hqp1wuXYHu9QfD1n/5UdAHDqDBpUl2RpqXkqENo2VFVtYwyW3ZDg/L551L+EXJREKL+qLdAfZcuLR4bA1IgzprX359W1b+sWTN93bqGuXNLy8o8Xm9AUaI0OEy6Zt06jTodTWpkHo3HRzo6jhw8CJeApWqtrYs++miqYVjcoMNPhHKFSeSuAVG8VG4WomhJsFxcnMvxDsM9eqnzaSyVOlRZeQIX0CYmHH7QEfpDGQwI7iPTnAyFjh8bq4QQYZ8ez2gikV64sIY6zrT6eveZcS47pdOaLG+bPn36gw82zZqVhA+jJ7nGdu+OP/XU9x54oJx1gSuVhc5iuInubs4i+ELiUxEMSoilTzqJT8hKUtc77+g//rEGyODOTW4FF2nHhOaWiLiNmZyVUACs2ecLjI3lLMurql1r1+7p7YXixSsrT7r++gCxFgf0gBqHRCwNuQSDnbre8s//vOSMMw7v3v3pq682trerk5N/WrDg9I0b3Q7XRMLi56dJg7DDRi5Xv2DBKmoc9fHy4vHK/fsbMfiVVxogrZOTzuSkmkgMf/e7ZddcA1Oy33zTvOceUXOGBpWWKgjmYjG5pEQ+9VSF4BrrmT1/fo7L5Vy+445hGEskkg8eCi3H/AQXSIXjfFBXN2fz5pjfn4V3NQzQxoiiMChbP/+5uXWrzGUkcqqabXdZlrF58yyaPaK2zn372pubV553XkM+52pceqlz6BBXl
R3KocLsTTCNSy9VSkpEUA1UHx5GQCbTA3nu42SapmYybzc1nfrCCyEY5hNPyMBtfj4ZKknVDBH0QolWrlThUDIZ+y9/EfuRfzzcbbuj/gOB0txQe+wxU24iNk0/jJ+0F3GvHwfhcrEte/daW7bYBw+K7jB+4pFcPEIohITx7dudWbOgdZXBYOXq1ctXrz72ZNof/mC3tMj5J0GZDAG0ATz2U0/ZHEVxJx6VbETmmDuXgCa27W1oCHLDc1WVxadxIYrqQW6b9EcfWTt3ii2NRMTMuVILJserJbyk/i9+NJPJVr5UD0CqGRrKDQ1JU6ZYjz1mvfaaG1FR7sJ9vQBX9KglCd+qoVDs2WczgYD/8suVwtPP+IyPiz6Yp59mL+BQXwdXA/hpMYWSW25xnPWQatyiSkZVAVwYA5ciQiIj0M0/S89P6bLExQ87QoxAFV9RkecywzfeB5DvAOCEC9dy+clTXJRMmscf762osN97jxNAMmWtuaGOHxg59gwfh9rgEPhz9my1rg58QDhkRG0I9Pr6uNPDLbtTUdt9tIY4j0yVUYcYKxMBt0uOpivaAiMRzwMPYB/N228XqXaySbfdjBP02IxCJxqxGiGyfPufTKUWCg+hddz8xM/wcLcKV0y51YkKsFxfFVMsvDGBi0/8PDA/Ts6QiK+Af9xHwu0FhcesOJnIlxRaSQsNEUzx+Som9tz1wQ8CYT0ENFyXLTy64VJjtix6St19IQM/jFt4eQRXV0QHAL2vg9kmjyKTSNzOGq5T518acSw/BOullHXh7RFcHHOr5Nz8y+2D1MjCDY+FV2i47wMBMwUhYc3Kl/LcJ9ipE5UVSuwYp4RozTIXrrmJlkO0QuE+FCok94+9xIM7ABmqvF6NOx9l6i4v9Mm7/LPwYCQ1P4mpUDrbfV0Iv0Cm0OrI9+BIKN+Gw+9AEfeDqnOPGHdZ8vsq+EUEvG/s/1lkhVIGV/Z427EeFl/hBN4q1kTuNuFVsQKS0gk/lH/HCI5AvgqHxa5caU+4V921TJ4W3UCUyAvNcsyiaK84mX7s7SmYN+0JFxYKFbBj76UpvJSAuxLyD0WIjn2WciF7zmUh1nZq9nBZA8ZnRpiPhFz/wsE86RFPr9DI6lDnFWaruH1HuD3GpWfg+H0CrvYWGrXzXYPCVrlNstCwQFjFzS7ce+42b3AHOrdFFHqTOK3D5phvk/xm+xM35rlPYHF8zi18kAhDKaNjoWWBxUp/2twfBDlywyoDGNf36E0DtGBqBHD4LTH5J+Tcahj2il4OweIoPHMv5V/tw76EH993Ci25NILMas9NhPmUKOuCm/HHMihn6L4MgIsD4AwsbtwCk8m/u4UVjV9KxJ0eNsdbDBOEl06+yZ+vsmHPsERaKquYRE/EuVAsF/o8C8+A8PtaMJXCe1kKL77R8un7wmMM3JhFyOzQGtz+Ku7k5do6M9t8R4uL21Rt4WbJgmVKBUsu9M4zwufTyYWHzdhT8FuuZH5nEDec8ZYUmjgga36RRDb7fwQYAOkxP+Fzk1pGAAAAAElFTkSuQmCC')))
#print r
# test remind:
#r = c.remind.unread_count.get()
#print r
|
michaelliao/sinaweibopy
|
snspy.py
|
_encode_multipart
|
python
|
def _encode_multipart(**kw):
    '''
    Build a multipart/form-data request body.

    Values with a read() method are treated as file parts; everything
    else is sent as a plain form field. Returns (body, boundary); the
    boundary string is derived from the current time in milliseconds.
    '''
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    for k, v in kw.iteritems():
        data.append('--%s' % boundary)
        if hasattr(v, 'read'):
            # file-like object:
            filename = getattr(v, 'name', '')
            content = v.read()
            # the real file name is only used to guess the content type;
            # the disposition deliberately masks it as "hidden":
            data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
            data.append('Content-Length: %d' % len(content))
            data.append('Content-Type: %s\r\n' % _guess_content_type(filename))
            data.append(content)
        else:
            data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
            # unicode field values are transmitted as UTF-8:
            data.append(v.encode('utf-8') if isinstance(v, unicode) else v)
    data.append('--%s--\r\n' % boundary)
    return '\r\n'.join(data), boundary
|
build a multipart/form-data body with a boundary derived from the current time
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L115-L133
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.0.0'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for SNS API using OAuth 2. Require Python 2.6/2.7.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import urlparse
import gzip
import logging
import mimetypes
import collections
class JsonDict(dict):
    """dict subclass whose keys are also readable/writable as attributes.

    jd = JsonDict(a=1); jd.a == jd['a'] == 1, and jd.b = 2 sets jd['b'].
    A missing attribute raises AttributeError, while a missing key still
    raises KeyError exactly like a plain dict.
    """

    def __getattr__(self, attr):
        # only called for names not found the normal way, so this never
        # shadows real dict methods
        if attr in self:
            return self[attr]
        raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)

    def __setattr__(self, attr, value):
        # every attribute assignment is stored as a dict entry
        self[attr] = value
class APIError(StandardError):
    """Raised when the SNS API responds with a JSON error payload.

    Carries the provider's error_code, its error message, and the
    request description extracted from the error response.
    """

    def __init__(self, error_code, error, request):
        StandardError.__init__(self, error)
        self.error_code = error_code
        self.error = error
        self.request = request

    def __str__(self):
        return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
def _parse_json(s):
    '''
    Parse json string into JsonDict (attribute-style access).

    >>> r = _parse_json(r'{"name":"Michael","score":95}')
    >>> r.name
    u'Michael'
    >>> r['score']
    95
    '''
    # object_hook receives each decoded JSON object as a plain dict;
    # JsonDict's constructor copies it directly, so no manual item
    # iteration (the old pairs.iteritems()) is needed — this also keeps
    # the function working where dict.iteritems does not exist.
    return json.loads(s, object_hook=JsonDict)
def _encode_params(**kw):
    '''
    Do url-encode parameters
    >>> _encode_params(a=1, b='R&D')
    'a=1&b=R%26D'
    >>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
    'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
    '''
    # NOTE(review): output order follows dict iteration order, which is
    # not guaranteed stable across Python builds — the doctests above
    # implicitly rely on it.
    def _encode(L, k, v):
        # unicode is encoded as UTF-8 before quoting; plain str is
        # quoted as-is; other iterables expand into repeated k=... pairs
        if isinstance(v, unicode):
            L.append('%s=%s' % (k, urllib.quote(v.encode('utf-8'))))
        elif isinstance(v, str):
            L.append('%s=%s' % (k, urllib.quote(v)))
        elif isinstance(v, collections.Iterable):
            for x in v:
                _encode(L, k, x)
        else:
            # fallback: numbers and anything else via str()
            L.append('%s=%s' % (k, urllib.quote(str(v))))
    args = []
    for k, v in kw.iteritems():
        _encode(args, k, v)
    return '&'.join(args)
def _guess_content_type(url):
'''
Guess content type by url.
>>> _guess_content_type('http://test/A.HTML')
'text/html'
>>> _guess_content_type('http://test/a.jpg')
'image/jpeg'
>>> _guess_content_type('/path.txt/aaa')
'application/octet-stream'
'''
OCTET_STREAM = 'application/octet-stream'
n = url.rfind('.')
if n == -1:
return OCTET_STREAM
return mimetypes.types_map.get(url[n:].lower(), OCTET_STREAM)
# Dispatch modes for _http(); UPLOAD is a POST with a multipart body.
_HTTP_GET = 'GET'
_HTTP_POST = 'POST'
_HTTP_UPLOAD = 'UPLOAD'
def _read_http_body(http_obj):
    '''
    Read the full response body from an http response object,
    transparently decompressing it when Content-Encoding is gzip.
    '''
    using_gzip = http_obj.headers.get('Content-Encoding', '') == 'gzip'
    body = http_obj.read()
    if using_gzip:
        # decompress the in-memory body:
        gzipper = gzip.GzipFile(fileobj=StringIO(body))
        fcontent = gzipper.read()
        gzipper.close()
        return fcontent
    return body
def _http(method, url, headers=None, **kw):
    '''
    Send http request and return response text.

    method: one of _HTTP_GET, _HTTP_POST or _HTTP_UPLOAD (UPLOAD is a
    POST carrying a multipart/form-data body built from kw).
    headers: optional dict of extra request headers.
    kw: request parameters; file-like values are only meaningful in
    UPLOAD mode.
    '''
    params = None
    boundary = None
    if method == _HTTP_UPLOAD:
        # multipart body for file uploads:
        params, boundary = _encode_multipart(**kw)
    else:
        params = _encode_params(**kw)
    # GET carries the parameters in the query string, POST/UPLOAD in the body:
    http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url
    http_body = None if method == _HTTP_GET else params
    # routine request tracing (previously logging.error, which polluted
    # error logs on every request):
    logging.debug('%s: %s' % (method, http_url))
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Accept-Encoding', 'gzip')
    if headers:
        for k, v in headers.iteritems():
            req.add_header(k, v)
    if boundary:
        req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    # the previous try/finally: pass was a no-op and has been removed
    resp = urllib2.urlopen(req, timeout=5)
    return _read_http_body(resp)
class SNSMixin(object):
    """Base class for SNS provider mixins.

    Holds the OAuth2 client credentials and redirect URI; concrete
    providers (Sina Weibo, QQ) override _prepare_api() to build
    provider-specific request URLs, headers and parameters.
    """

    def __init__(self, app_key, app_secret, redirect_uri):
        self._client_id = app_key
        self._client_secret = app_secret
        self._redirect_uri = redirect_uri

    def _prepare_api(self, method, path, access_token, **kw):
        """Return a (method, url, headers, params) tuple for an API call.

        Must be overridden by each provider subclass.
        """
        # NotImplementedError is the idiomatic exception for an abstract
        # method; in Python 2 it subclasses StandardError, so existing
        # callers catching StandardError still work.
        raise NotImplementedError('Subclass must implement \'_prepare_api\' method.')

    def on_http_error(self, e):
        """Translate an HTTPError whose body is a JSON error payload into APIError.

        If the body cannot be parsed as a JSON error, the original
        exception is re-raised unchanged.
        """
        try:
            r = _parse_json(_read_http_body(e))
        except:
            # best-effort parse; fall through to re-raising the HTTP error
            r = None
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        raise e
class SinaWeiboMixin(SNSMixin):
    """Sina Weibo provider: builds weibo.com OAuth2 and API URLs."""

    def get_authorize_url(self, redirect_uri, **kw):
        '''
        Return the authorization url that the user should be redirected to.

        Raises APIError if neither redirect_uri nor the configured
        default redirect URI is set.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://api.weibo.com/oauth2/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                               response_type=response_type,
                               redirect_uri=redirect, **kw)

    def _prepare_api(self, method, path, access_token, **kw):
        '''
        Return (method, url, headers, params) for a weibo API call.
        '''
        headers = None
        if access_token:
            headers = {'Authorization': 'OAuth2 %s' % access_token}
        if '/remind/' in path:
            # sina remind api url is different:
            return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
        if method == 'POST' and 'pic' in kw:
            # if 'pic' in parameter, switch to UPLOAD (multipart) mode:
            return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
        return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw

    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
                          client_id=self._client_id, client_secret=self._client_secret,
                          redirect_uri=redirect, code=code, grant_type='authorization_code')
        r = _parse_json(resp_text)
        current = int(time.time())
        expires = r.expires_in + current
        # weibo may ask the app to refresh earlier than the nominal expiry:
        remind_in = r.get('remind_in', None)
        if remind_in:
            rtime = int(remind_in) + current
            if rtime < expires:
                expires = rtime
        return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))

    def parse_signed_request(self, signed_request):
        '''
        Parse signed request when using in-site app.

        Returns:
            dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
            or None if parse failed.
        '''
        def _b64_normalize(s):
            # restore url-safe base64 to the standard alphabet and re-pad:
            appendix = '=' * (4 - len(s) % 4)
            return s.replace('-', '+').replace('_', '/') + appendix

        sr = str(signed_request)
        logging.info('parse signed request: %s' % sr)
        enc_sig, enc_payload = sr.split('.', 1)
        sig = base64.b64decode(_b64_normalize(enc_sig))
        data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
        if data['algorithm'] != u'HMAC-SHA256':
            return None
        # bugfix: the secret is stored as _client_secret by SNSMixin.__init__;
        # the previous self.client_secret raised AttributeError here.
        expected_sig = hmac.new(self._client_secret, enc_payload, hashlib.sha256).digest()
        if expected_sig == sig:
            data.user_id = data.uid = data.get('user_id', None)
            data.access_token = data.get('oauth_token', None)
            expires = data.get('expires', None)
            if expires:
                data.expires = data.expires_in = time.time() + expires
            return data
        return None
class QQMixin(SNSMixin):
    """QQ (graph.qq.com) provider mixin.

    Unlike weibo, QQ's OAuth2 token endpoints respond with
    x-www-form-urlencoded bodies, hence the _qs2dict helper.
    """
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                        response_type=response_type,
                        redirect_uri=redirect, **kw)
    def _prepare_api(self, method, path, access_token, **kw):
        # QQ passes the token and consumer key as ordinary request
        # parameters instead of an Authorization header.
        kw['access_token'] = access_token
        kw['oauth_consumer_key'] = self._client_id
        return method, 'https://graph.qq.com/%s' % path, None, kw
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, code=code, grant_type='authorization_code')
        return self._parse_access_token(resp_text)
    def refresh_access_token(self, refresh_token, redirect_uri=None):
        '''
        Refresh access token.
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                refresh_token=refresh_token,
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, grant_type='refresh_token')
        return self._parse_access_token(resp_text)
    # FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
    def _parse_access_token(self, resp_text):
        ' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
        r = self._qs2dict(resp_text)
        access_token = r.pop('access_token')
        # convert relative expires_in to an absolute unix timestamp:
        expires = time.time() + float(r.pop('expires_in'))
        return JsonDict(access_token=access_token, expires=expires, **r)
    def _qs2dict(self, text):
        ' parse a query string, keeping only the first value per key '
        qs = urlparse.parse_qs(text)
        return dict(((k, v[0]) for k, v in qs.iteritems()))
    def get_openid(self, access_token):
        ' fetch the openid bound to this access token from graph.z.qq.com '
        resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
        r = self._qs2dict(resp_text)
        return r['openid']
class APIClient(object):
'''
API client using synchronized invocation.
'''
def __init__(self, mixin, app_key, app_secret, redirect_uri='', access_token='', expires=0.0):
self._mixin = mixin(app_key, app_secret, redirect_uri)
self._access_token = str(access_token)
self._expires = expires
def set_access_token(self, access_token, expires):
self._access_token = str(access_token)
self._expires = float(expires)
def get_authorize_url(self, redirect_uri='', **kw):
'''
return the authorization url that the user should be redirected to.
'''
return self._mixin.get_authorize_url(redirect_uri or self._mixin._redirect_uri, **kw)
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict:
{
"access_token": "your-access-token",
"expires": 12345678, # represented using standard unix-epoch-time
"uid": 1234 # other fields
}
'''
r = self._mixin.request_access_token(code, redirect_uri)
self._access_token = r.access_token
return r
def refresh_token(self, refresh_token):
req_str = '%s%s' % (self.auth_url, 'access_token')
r = _http('POST', req_str,
client_id=self.client_id,
client_secret=self.client_secret,
refresh_token=refresh_token,
grant_type='refresh_token')
return self._parse_access_token(r)
def is_expires(self):
return not self.access_token or time.time() > self.expires
def call_api(self, http_method, http_path, **kw):
method, the_url, headers, params = self._mixin._prepare_api(http_method, http_path, self._access_token, **kw)
logging.debug('Call API: %s: %s' % (method, the_url))
try:
resp = _http(method, the_url, headers, **params)
except urllib2.HTTPError, e:
return self._mixin.on_http_error(e)
r = _parse_json(resp)
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
return r
def __getattr__(self, attr):
if hasattr(self._mixin, attr):
return getattr(self._mixin, attr)
return _Callable(self, attr)
class _Executable(object):
def __init__(self, client, method, path):
self._client = client
self._method = method
self._path = path
def __call__(self, **kw):
return self._client.call_api(self._method, self._path, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, client, name):
self._client = client
self._name = name
def __getattr__(self, attr):
if attr == 'get':
return _Executable(self._client, 'GET', self._name)
if attr == 'post':
return _Executable(self._client, 'POST', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._client, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
if __name__ == '__main__':
#import doctest
#doctest.testmod()
APP_KEY = '???'
APP_SECRET = '???'
access_token = '???'
expires = 1393739173.5
#c = APIClient(QQMixin, APP_KEY, APP_SECRET, 'http://www.liaoxuefeng.com/auth/callback', access_token, expires)
#print c.get_openid(access_token)
#r = c.user.get_user_info.get(openid=openid)
# test get:
#r = c.statuses.home_timeline.get(count=10)
#print r
# test post:
#r = c.statuses.update.post(status=u'测试http post')
#print r
# test upload:
#r = c.statuses.upload.post(status=u'测试upload pic', pic=StringIO(base64.b64decode('iVBORw0KGgoAAAANSUhEUgAAAFAAAABQCAIAAAABc2X6AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAALPBJREFUeNqEfAmYVOWV9t1qr67qpXql6W6gAdmVZgfFFXEnGjXBKGPUmDijj8vEuCVqYsaMPnGNjtskOo8LiCbRRBExSDKKiiIq0DR023s3vVVvtd/1f79zbhVO/sz/19MPdN+697vfd75z3vOe5V45u2KFZNuSosi5nOT1SrouGYbk8Uiq6gQC+BPH8YtsmjhHfGtZ+NbxenHE8XjEVaoqmaas6/hT0jQHf+JMHMGw9JWED67CgPSVjN8VxZFl2TAwDr4Sg8uyOM228R++EjPBNBxHnOA4+EaMJv6TxUHcF5fgh77CTfGPQ5eLC2lkcRxn4iuMiani1vjdMBRxP8tSMN1QyL3A55PpbDmdlnGNzyeuxBrwwbe4El9hRFqVGC6bxWyEdGi1Mi1exrcYDQLC+RAEBsE8cByrxSzpRrhESaXE1DEViMPnc/x+sSj8sBwxGk1dpsWI2ePygoBwC/zgND5HUZRcTmYx0TrdO7K88C8JXcNBiYWHDwQP4eECHMS42IFQSOb9z2SwPLF4zJ7W6c4Di8QRWRZ7jtFxHOPiIL7FUJmMWAaLD5MjGbMeyVghJoGDuJy2DpeLcfAnzsEEcBf8jgt5rpAsjrPK4E+6o3uQhCvmBnlB3Hwj2nbWCPGD8UkjSAPxwzqD0bEw0mcxBEbExYYhp1JilrxRuFjThALTOa5S4BxMggbBjYUV4H6YE/7lGeOHF4ARsDB88C9Mg/cnPwc2LneikAj+py0VF/Iy+O7Yf1yO33EVzuS74Ka4tSJ0VgxApuSQyFwtowkr7nZDGbJZh7/je2MZJEJsslNUBGUT68cmk9U5pA5sjQUR4Bcb36bTWIaSSGBMVg2HrMAdGUdwR5qB2BysnzUfIuPVYvN5nZgSjmMxLGX+kOkKrQkGZVYEljt9xPkYHydDNxkL+CtIljECNixugyuxBllWsL34Ipt1zQBanUzCkoW2YDjMyTSV0VEX3khPXGvBPNjkfD6vBoUWuOJuC0+lYLrYB2wdxIezYMY4wurHukpw5d6OJirlcVHNZj0AC0iWrFSsgUGHAYUVmJbEB4+pNE0PC5FIHApLnaFInMTQB9vA4iGFcBjXeHRdy2TMbFYjDHN4W2DwZLcMsFiAYpp2IvF5KtWRyWDlEi0Sx2W2TMJqcT62iFRDJvx04YcBgj9QK8IwiffKcVRFycryYV1PY2QWDYsPy8NN2ZuwkuNesGRWfl4UCx3yxWmyrLnGAIHhB9JimyFFFYaqKFhkh2XtmDkzUF8/59Ch43t7GUvcW7KzwcohfkX5fVmZ8rOf1Xg84Ycfjg0MWDgOqWHSsC5gPuMCBABpYt9wI+wn9hDX0hqE4mJAlg5hIW4BA5vIZl9avLjirLOOdnU1bd0akGWbUZC8hkyKiam6fo5MVxwk2xS/E2KzMDXXBlj2vC3kaYTWAaINw0yndy1ZcuZjj02NRgePHk1fcUUoHreKizGizB6P9FlxnOFcLvid72y44AKMm2hpsZ55BgriEDyydblGhbuw5fO/hM9iWxiZyXmw8Qv/GQppivJRKLTm7rtPaGgAHKVaW6UPPpCCQd46eXLS1QUWPfsXfOX3y2TJvIWuMgrQwqlEJ8QX7G+YcuB3Qp20ac46+eRp0ShsYkp1tX/2bCmTASaJZWAbgkGvZXlzOSiuR1Vrq6tZkEX4Cj+WBTX1ZjIQu8K6Q5h3jBtgZrBqGA5tgrgvpgjgIN4iZplO29lscMaMOQ0NYvttO6Dr0GFhzMAaIkXCG2HNjNW0Kok8uUOI694lb3euwzzmrFn12bTIlsKyXN3cDE8idCWbVQ8fFvPDh
HB+LvcXRRmvra2amCjN5ToMI8TLkKTOcDiFBet6Ly4MBAABc2QZN7fZ1UP2BAHsHoWd094qRIkESvGHlBynlU9MZHM5P1YFBO3vZ67i+kUiSA6ZpOAUeZRiSxHnY3BmVhgZFirxSTgXaoxfgkHiHTJwX8XsgczB4JS//tV4/nnvwoXSSy+pw8M2Rvf5sHvbwuHEnXcuXbYsPT7e09rq9fuXLF/OUy3btKlzxoyxoaGy6dMD4XA6lxvfsiW8bRu7pWMcEHMiIxRGjlky6kBdyaTZssCfAt3d40ePFjc0OOSB2dbYoTL0sOG4rpugR6yFEZ4FhyPkO4UTFz4N/zJIwPt5PKosq6lUXNctyAZQ7ziT994LRbJNMwhAsm0tl4uYpm/dunVnnSWQoaxs3owZ0jc+RT7fyaed9s0j1uuvm6xyRBgdJoYs6Dx7FfNjZ8FcQFHgjzAfr65DdhIWjIlZllrg5/jHNC0akMksVmFHoxhHwbDM+XEv5t7kRDXmDxCk62CwQmhpLvdGWZm9enWktNQ2DBuKXVuL45rXa6VSmcHBXCoV6+yc295uHD6s1tVJ5E6djg65pkaKRIRMW1udo0fFPY4ckVIp58sv7S++kBmZMAMYHvtP8nNeQkqm8ZbjWMTDvB6PbpoDAELLata0GDktJxr9dMUKfdcuaN9oOFwiyzMmJsp9PtDUVCYTgGu0LIOdHFE9l7GQAQs9gvHnFi2CbGzGNFyQyWi2vXnGjBm//vWy6dOl//2TlKTczTdH33/frq0VwK5p6tdft6xfP+3BB+E2jI0bnb17MT/hjaC3uH006hTILXNSUjZvMNg8Nvbl1Knlth0dHa00jCpsrNe717L2zJ5dvHIltlcpKzv/e9/z0rZPGEZXZ+fk2FhJRQW29+iePXNffvnLVKp/xYq6gYHj9u2rg2IGAg5zCiKeQncYzDweTSKyjmkx/YC8rXh8Vn19E68Wm3P4sJRMCkoEzccWjY9L06bJVVXheDzc34+vpI4ON3iQ5a6BgalMeKJRH9QP1hoIpMDPsG+67ocJ+XymQAgK5jwenLN7cnLvpk3rr7wSujrY07O/s7P/nXfmffBByw9+cOlNN5XnuVrhE/V4Fs6cWfhzYWPj8KFDdmnp1TfdNGGaB957L/7444t6ehxGZgpXZAoE2EVpTJiY2bsuKxKZv3evfsstSjKpdHVh/XBCKuSkaRYUAUbu9do1NfrQ0N/S6cSsWeXJZGxiwjSMIUUZmT8/RPrTfe65n8XjxYnECFRx7lyoIzStaHLyhD17ai3LJgOGWg7r+v4LL/zhrbcS1ZCmV1VJS5emgHznnnt+W1txV5cE9eEAq0AeoZnYA0wVKgndOXiweNu206ZMsZuaSk466cT16/sWLRq56qrynh4zEhG0HMZM8YmwXAHdK1ZwqCHGgmnRL1ByJ5GAxx+Gp/Z6cWhEVaFaUbhlTYNfnZrLvTRnTsntt8+fOzeXTPa3teV0PVBcfPzChVFyfdBXuKUEPKrjVCP2IG6clqSBP/95yl13AQ6wA4DXLxXF+s//bAL+4zM6au/Z47S1OXv22IcPA3Vs6NSUKfCFRizm3H9/CE5+bMy4+Wb700/lfHwvYlIgvKZh8MyTT1YuW4Zv4ps3h3/xC/gXh3bRIafNYY/GvEximgZF55AILKKkZI9tf3reeTXFxWnLKp01y6coR/r6TJ8vqCjGc8/Vrl9/yoknkgsqm11f/3eKB0Us0vWiAh+ig8FcbjpEwz4fik2gWlJaKr7r7tavvdbp7hbGhqs0DbAiDKqtTbir9vbm115bev319q5d9ocfKgjgYIkADswWjJAwP5ROd73wQvGyZdCm8Nq19qOPqkSNhEoXcEtVNTfC4gQFc24iW/bkZO+JJ37/rruC/wixUul0xR/+YM2eLYOEDA7CPcK8nYkJzFvoDyKqRCIxNPT+CSfMOvvs2J//XOH3gzDZgO7OTqAXB2SQtxkIKKSr1q5dAHmFeL+gmeSQO
X52gNi5nNrdLXbi7bflQrSYT2IwHYYmal1dExMTFdGot7TUCIfloSGHWQbUlkHbNDUHrBi35/iOgzhavK1pq+Nxz5EjDm5z8KBwITU1NuQNJSkr8731ltPVZd98M+gHpGuRDssUQioUmgCuPqyoqNi4sXHu3PHPPpMfekjhoBobywCJe+n6aElJXVGRMAF4Lw65OZwklyEIAuIKREuyXLJ3764bb1z5+ecKEJgNGD4mT+yYFIb6+8e6uipgIIDeykp1cFBYK+ZPkKGAdUEowAA3pcJMIK/h0JmqlhbpW9+CmO3x8aQqUjIBgC3moetqURF2JqeqLTBFWa60LOi5qQOI/TrOMc2saaanTVu3YAHQKLhoUQu0VJZLdb0sFLI40gIEYjH19cVMd0dGWMMFQ6CghzM4bqrEsj6urKy/5prJeLzkwAFBVGC37Ns4dHPJjZUYG3OzYlOnSvv2sZ9nhiMUB/vorjOfJXSTJqaJZezz+fZNm1ai6+Pz52NmAXCSwUGs3O/1nnjoUNH4+IuzZ1d8+9vFweDhw4eT7e0mNrakBDupYjaBQN1xxzH2qqtW9T7yiAhlBwcb//SnmSCnnF5T1Zpk0kSAoaq56mpEBWYhCwn7hBqTumE/DcsqmT9/1aJFyQ0bnH37lEgE5ByqhzuKHaJUFDQrRUrhgggznJISERdxSpDMOJ8WoPDNNQmPBxcnbHvvOeece8cdcKEgycWUB00SGuEn9+KLh371qyX/9m/L2V2ffbZBx9V/ZPBBv//0s87i34ePP9669lqAjUBO3KunZzQeD1VUpDdsSLz5ZvXkpMQGrOs5+Aioqq6XOM5exzFmzRJmu3RpHBoxMTGsaQHbrkZEAXzGymFZ4+O7Z806M0+WACLMqzm0FgSb8Fhjgi5TgtNhBktmCVxZXVdXxQhBQb8ciRT19Qnxl5WBzTdqmgfEA66CoksP+8lEQsgVR8bH5TlzJLJP8aFw3+npKXvxRZNTP8T7g5OTOqhLRUXZokX777577xNPZCAIjwdcORmLeY87LhSJmD09ucrKiyjMDk6ffvCnP21ubq6YPx+M8OBXX012dOhVVaGiolQ2O2/DhtqyMveOmDNkxxlyTsJxJifX1MSIJ3PymakfzAYENRxWsR5gL6yfw06E/sBbICfYiM8nyGF5uQKggto4DhQVTJvxQx0fH1u40PP008VglJ9/bv7kJyI0ZTyHEDEJSvebyeTgbbc1bNrkTnJiwoYcECHJMsKPoPT/+WAZKccJyvLf07HOTuPCC7EcOxxWKMHs8hbE7ZwowO0ByzAGBUoF+4a/cpzheHxnJlPp9cK6gDSYHPSncmysFlSxpMSmkAXeImGacRJQSNfLPR6LoTUYNL76avjAgeLVq+19++BInfJyptwS5dwcgi6vpkWef37ilFOiiEAkqRYeS/CGOJslollBXcE9/pcPjDKcz93kKX4Szty6/36O/sFeJIqohYjJE1ECgFMEFHw7lL+FdU0mk1uamlbfdVcsFOru6PAFAl7gh2kebm9v27WraffuCKEdrPqVlSsrTjkFCmanUnXbty8EtgOKVbU0lRr+7DNn9WoTZyKCpYSDm6ZHlELhG1h+5OhR/YYbzMsugxN29u+3cQnoOi6Bvxwbwx2lxkb59ts9jY3urn7yifn444KBlZbifBiasmIFOKZz4AACNbu5WSgzbIrs9FgVie1IZBdOOMH1e9BY3mqv12uaB2U59dRTyxcv/r/lOuk4I7/4Rd2WLdjVRCbTf++98y65hL/qGRoaBt/88kvYoZ1IvH/OOWf++79nP/tMuvZaQLdEiV64Ys68yJRSFd6IQn8Old2MfyEBBG3KZodmzvQ9/3wZItbWVuOKK4AUCrErcW0eDmRKFUqUSHCzGlxgoDQzB4IineBSFo7+OcMCNTCMkKYVU/bQaW42vvMd/VvfMs87L3PTTdnJyYgsT7nkEiMUghUUBQIznnwy9+677AqnVlTUP/TQeH29kkpZs
AXskiR5Z8zADkssZiwMi+TYm1KzNrloYdjQAk6AcrTAjgRWVlSU7uvrRUxGAZaA93AY/gaeT8RAsZigorAFXI5dZQrJiVcSAaevZfbDEB/vtUtuOWgEj4Vnsu0kVAuY3dpqf/651NZm9/RYf/xjy+9/L3A+FlNIZwBgCKqkW2/Vb7vN/uQTmFBZNBqZO9cCGzdNdXISJEwFf8zDNQSKgFGQJNgnthDRMkCFyijMw9wEPWUmxL8IhnV9EhtISqFStUBsCYUEEhUxZfa0nF0gSuNwNh8qQF6Dq0i8QAqOyUdzOa+QlFZxJ9Ir7IAPQ+N+ppkKhztHRo6n0pfMaYpsVsNuYBKvvWZu22ZMmwbuocEUsUjK3QMeMJQMLOScA1dYqPjqOkLeSSqUCv4A1OFAlRGOYg+9uDgGzMtXT8RSsU8Yjes+zMnyjocrrG5QBIlAFpwJpmys5u5qnsRyhh07HMCNiabZ06Z9HA7rhmEZxoEZM05kCjE6KjE1VZRWy3p/8WKA7KpDh6IdHUZHh0jxOU42nR4MBlXKIkh0V4fqYO4iOf/GWXgchEC5hkxhENefXeaIP4NBT77IaAOloB1YJ3QYCsJlDXalXL6lfLObtMqvnxNggnhw2CBuwCIh08JAHlVNjIwI7Z0xw/u73/FufLuurhrWArb8H//BqUJcdWDRorOfew5xf+dvflP1zDOCq6gqaMDWaHT2RRdhgxxELfDVVEPmormb4uEMZj4NLnOVjFRA5iIwxI0jmcyY1zubUtZusoaTcPlcMrNjLvS6dUnO+/K3hXNIrTQuz8L9OsxLqNwKidqQIn0Fs1k8b94xX79nj7V1q7Vzp0zfArcWKUqFz+eXpMCNN4Lif/7WW4GOjq/mzVt4ww0rCeTt554T1l5U5JZUITuGEC585/OPvL0y7xXmQFYHu8hms8bs2cWkgyIa5awN+1UWIimOW5SlcTgbyRkrsZHsDjFhkeLhJgLWMciPS6SKglChGDtD8zOfesppbZVBuQA5CFayWYUywOJPx6nft0+/4w7rttuCkcipl102dvHF7W1tG2tryyl9aW/bZr3/voAArqRzYZmXyrUCTqBCV0VKMScVnA1H6Y6jg3XV1jKXElIG/gEyKDx0CK5YNVwTABJh/9i8TZE+c4N8qkXLvGC3/wHaz2UXKtIinp62fbve2Kh9+aW1ZYtLG3AmdAmRLVclScfg+jxvvKE3N6v/9E+eNWtKYrGmuXPF7NJpe/t28+GHJcTiHPGza4XOM3Un8HTrspg0a2NRkc37TIYnLN2ywqwCJCaBf4yXnIvmFgyOscgKXDQtZDU49Y1vISCBYUuWHGsWwd9UnhZngCHiCJMVcpXcViCzBnIhCmpJhSIxOY6ra2rkVauU0lIJDgm60NEhc0GETQgLwDZy1ZsU262SE8Zw94lbNyzUk+H2stmB+fNjzz7rR1j/wgvWAw84kYjQBWIybL2FLLcb+gLtqSrKGQXhHbAE5jkieKA5udUAyqE6XBkulAKIeIrcNWdeuHJDkbrLZnA+x9m4DZX8hRuA0RJndiN19hlYMDmzYwVrBhUemf/lrxhyGXUsy25sVKqrRTafIz4GV272ALCBn1DngRAWnwBBUIcNC4V7FACBGjtesdp8Rccme+CCrTgCeMQacAHrTN7MZG5/4FpOvtjPGyjT1slcx4DLYZ+XL9ZxZdQdX3HbiDj5xBlGt9pC0rTZu0D1WludlhaZoE5iG+TMDrWUuDyEO1LYvTOSse4w6yL37na7CDfIlXEuZ0FAWAYLEpeRknDmhTsoRJ2S7800gCkrlefc6fK8WcysXVTFc4WIO2IlRJjcdDFmxj6TTFpLpYCLOiu85hbAuP2DK72cpkIYipjF5tAqrxSCZlEOx3XR+Xoi02fFtexvJPFEeZZVNJ/owso9qZRX132plMqoxqXzAlfhlRPpd3t/uN7LPWgse2a5+ayowq0j3H0EzMd68t1gqmke9Xq/xDncyEBe1+1ZY
5SiCr4my722PYH94JmzLTBHpsqjW+7G/nM/BfXukB+nsqhbrWYZc48QMXVRcVOUfkl6LRJ5OxaLY1ZseHmJSoUuAyrYs1IoVAd1G3BgJsGg8O34l4GaUhCuVpNCis3HnpOkMN2d8+YN/epXI2VlWt5rFHilRIVOKNVANvv6eef1XXopN1OohoGDGncAshkzx8aFVC5nRBRZL7e14xvdVEKlUylWMDWdTqbTfzj99BU33lhaXDzw2mvRJ59UuFqVTmvgzLpupdPQLom0DrLD1osSZiLhKTiYTAbOxmJVJPt0KLIRfQ2YPUJp7Jht+0jRhnK50PLlZ5x66tDgYOLWW/2RiM/rtRXFJA7vYRhLp3dVVW245ZY6TRt5993Q2FhKlicwH0mqDIV8imJREI5gxl0/Y5toamFl4JwLFdq4R8LhRAwk5zhHA4ElV1yxhEJw+/LLs1u3ygik8JXP15dM7mho8Mdi5QMDGN1AjFVWli0rQ3gERfCPjoaGh+1czvR6546PT3Mck2kQkIY3wetNZ7NvVFbmFi8uNgxfe7uezfZXVp528smi8nP22Tt6e3O9vYGBgfldXQ0IARznnVAoUVqa9PkqL7xwGlUkO2+5peWDD6RYLFJfDz1vPnhw+fbtJbBteErYAgImbqYAmopGJAJPh9K8Tj695lo1oY6laZW6rlKoyNlAlQMuny9jmn9ateqUn/8cYfBIPD4xNqZ5PEVR8fGRNWYMY3R0NJvJePz+iYMHh++6K5bNWpQAY7P0ZDLvVlVNfeSRE+fNg17Ek8mcaa4rLuZgEkNd+OMfI2RDEDO6bRuC0AFI/L77Tj3zzLAkhfNZnuXnn4+fAvmFIiSgxlu2CE5GYMQNkQ55R60QRjn5Crrb8EQQLQwvkykyTf13vzOWLBHs+fHHsXgRqchyIpM5/qKL5lRW4nAYgTh+vgljkuT3eEroW/Gpqhpoa7MefdRB+MECxewdpyEWmz5vnkop3mo4f2wIlKWqyh0JBO7jjysCgYp9+wyPp1qWa955x9/d7SxYIK1Y4Y4cj4taNCa0YAHCZq27u+jTTy0MRXrkdvcyvxAx/KJFMsVfBe/HqRYPba9BjXm6omTS6Wx1tWWaSjwehGGbZiAcVi0rs3hx8OKLlSlTHMxyaEgwhJ4e+9Ah4agbG+Vo1F682HPqqe7swUDvvFOmBrECbgvZL13qWbLEicdFum9gAORx5Pvfr/jRj+AR9Isuco4cEedR76hM+Aoyb2ra8BNP1J92mrN/v3nddfbYmNjGykqR8Qb35Ow8JysLvWnkqylrSTGXQ105TB6wno9M81B9va+42FZVMxTyVldrqqpFIqqmGfF4Lp0uHhlZ9be/FX/yiQH2A/FhhYh7uYOYegodqnQYAKGHH55y5pkiSQq6mu+LPqYL2If//m9p5064VDUfMPhefjm3fn0QLhZyhGZy9MM2STkZr2X5/+u/nKYm+fnntc5Os7RUXDs0BOAwoJUcPAKlKGBwb0fLFIkVmWpTrh9WFETCO/3+7muuOfPcc0so4vG6FYr/WUCUpOS998pbtvh4lvBDHMRzBYPQWA8E/NlsfOvW2Lp1PiDk1KkWtpdidO6jZb4Bt9fn9/+tutobDlcPDlaMjPgHB4euvDLr85nAfE2L+3wqgFrXU8GgAWsKBv3pdOXhw0Pnn99jGIPLl2NjVECjJNUNDZ2QTHrZ6XBXMhw++B9TMVF5yDeqiX3mIBEb1dS0ceNGX77iLo2MSORXBaqNjsrl5VCeUDIZGh8fkOU9mmZHo9ifDMQJbVfVMLWkVqjqcbatwEkcOTI8OFhbVaUMDQnkZErDvXPw8YaRzOXePP30k269tSoWGxkc7D58OIG9UtVASYkXhFyWcTwYDALtgqFQOBzWvF5A/HBvb39zc7SycvmCBUHqgMGwI2Nj8bvvrn3/fYvdLTfQcEc8tXPnVbzQsW/bps+3rLVV2rwZblP+4gvRjzMyolJ7s00eFWuwqquBdkNdX
VtOP33ZD35QU1WF2fj8ACkPdiwxOZnOZDKjo32PPjodNqaqKUBRVZW9e7fDSMFPFpDK4QeuaM26dQumivaQ8oYGbrr7f30wme7uymBw/rp1boMTfBgx3Gg4bIGS8wMLhYwH54AozNbYJDh16Pawwn+Cdfzyl5ACHE8SpquqI4D7bDZKA2XT6ZqWlqDPNxAMnn7bbfNrav5uPlVhchkzZmTXrbM/+cSvaRblAx1ACxf1/H4GDpEGDwSiPl/42Wetr7+GmgyEwwlFmd7fL8diImiDuvX3Cw/ywx96jjtOmP+OHdavf409ELAUCilAGY+nq6iooaFB4PCnnzpffy2VlbmRFi2KA08R8EUiGqe8Bd/iBwxI0UX/VzA4nkq9snJleVOTMT7unzYtUlraMTSUhIWUl0/s2rVo587GcNh+910Ebg5C3+ZmofYAFYizslKZPx/eQt28GfQL3CgxMSHuMjAg3B53k3MVmut4wAgAz29/K01M/HXNmrm33TZx3XWlHR0GxieYBWHsPXAg8OqrFaWl1tatTnu7VFIiUa+lNjDwvs/X+cMfxkABOztFKzHWwlEthRAiDUo1BhE5gClwFlcgGAyP66aUf4CiHC4tXXnPPU1U9fl7nVq+3NizB+TQ8/DDomPZstRCao7gEUegyQiqATbYKOCv6HdqbeUuAbe+g/vlch5FyUHZ4LRVdQJotGTJggUL+q64ovf++6FEIUXxEQTmRkfTQ0NYsCjuwWtQZdhQlMF0+utTTvn+1VeD/0488khWUfy6HvX7Teo7dCgUc7sPqRCrcSO9AC0uDkBslNYyvN7ZmYxoJYWc2tqcvj65oUFs0eio0tio7tqFKULVD3s8H9TXT0Fg3dtraBqYUy4cjhpGdTo9FaNjbUAax0FMI+rxY2PwMQ63fZPDgH/cqWmHV60qCwa9HR0DuVzTmjWYXuTyy/8WDCZ6epS+vsojR5YfPRpQVWicwNexMUDd24oyPG8eBJorLz9x0yaMkz733B2joxr8Uy435ZNPlnR2yoUUD5wu17REVnj5cs4bHHvGgqAcugHuobC7yoiP5fEEdB1DY+tUSqb16PofN2y44MYbAZK9vb3BaFTPZHyBQDaRGGptVd56a+mOHcGiIqCINH26Z2ICDpz7pdwEoq4POM6OG264+Oqr4fnGaXKx/1kNBBofTadDV17p2bfv6GOPHQfHdtllQ4bx8T33nHTxxUFg1T8CteFEwv/d7/r7+jjL7QaCFLpqLpRBjQs5awISLAoM9q2ysiKvN2Pb2WnT/EVFvqGhbCplRKOg8k3t7QOx2NnXXtsAc5KkCiAKP+GFT3W1NGvW5Nlnj91xR/CNNzzh8JednWWmWYOAibMwlE6Cgw1ns2doGteBY4OD0vAwLpQoteDs3Alo8FRU1Pf3m0NDIDMDL76IvUAEA5Jx1vbt8PBOWZkze7bMfa0gmAcPSjNn4szyDz80AHWU+udeFrG3lIoSjWlu4opCCplSJLZPtHL+Phar+c1vZlRXIwaKYcOpsyxHQp3o7i695JJ2zPXVV+tqaoBY5n33OfBYMK1Ewli/3vMv/wIal92/X73ssmHTfONHPzrPcaqeflpUjwtkG+jFrnLaNAGWnZ0AlZ5zzql8+OFAV5e+YYOoD5O7hiNMZbN/uuqqExcvrvzJT1Qx76ydzaqy3FdTo7z8cm1NjXHDDfb27SKLKBoyJ9zkFMfSnC2kjAUlNIiICGVjHCcGYqVSM0OhpoaGcp+vRtdB8WAJoXi8tK0tkExWDQyINuV0OkVFI7u9He7H6e+3Ozqk3t7UM88c/vBDQdFiMU8w2FFcfOrFF9dccEHW41EKCSAmt7gWU2luhshEo0QkEtq9O9PWJh05Iko5sZgSiXgRD1jWAUVpWLq0fu3aRHk55GLRozFqIBDu69OhGgjI9u7VwFJGR4Un42ZxzmPmiQYroMaNUBLXOOhhC0HBUin4g+NaWrA/UDALy4DzKC3FCm2gPEGlBoDVN
JNoqgVKRM95cCvZ19ksiMc8SiMjLC2NRIogzfp6u6lJ+vhjbmAudH4KPgC27PF4s9nJyckQFOPqqxEk+oAxhpGy7f2O015UlFy/fiNllEf+9V/f/e1vrWhUAyimUqHu7vArrySKigActZpWDeLElINbkzh5yFQcaxTEg5+N4JwWVbfcDDgIuuN0ffHFUVWtxz5kMs7gIHQhijAV4iBKHLCsMS64TZ++o7FxQtex7Qm/P7Ny5aXUlShaCTOZdCDgjI/XlJf76+qc3bvl/HNbDvUCup0Htv16JDJyxhnlmpbq64MOVx89OqOz880TTph66aVLFi2aCd9Opj5n7dqGtWuZXuWE8k7E29sRcpvr13/V1ja8efP83l5+cOBYYanwkKfo8cg3UotYh5vqqSnMa5p7ZPmjjRuPP+20vS0tMCGV2pjN3t7wwYNLmptjgDXD4IKbv7r6uBdekCndB/OvpMIXGLj10EOwfDD+qooKzo1y6wxnPwVeUDZT3Au3vuOOa844AxaYoQdERgxj5LrrztywYRb4Y6HjjnonA52dUnExNtAbjRZFo7XUxCA+S5cOrVmTvfxy38SEzFnXfGAoeDG0XOwwVWtlLrhwWyKhmW5ZbWvXXnvnnQIxC6E2fYay2e4XXww/+aQYgWEZYQr3iLqRVMp+7TXrnXfA8gCViJ99pF0KMUq3SkAQYlMHDNQ+WlIybdkyJd+DKg0MVH/4YQUY1S9/aeBGljXZ1OT/2c9ARazbb7cATmVlQGPQT6m8XJ47V7ngAplyBhW9vToljN0HR7hXlos7FEJpblcjd4fn2/AgeDOXq6ut9XPQumOHDRACZ8ASN22qKCuLXX116r33Ih9/HOrvl+hpF+vBB+3OTgCGCMQw9IEDwruQ9wun0xnQLEBjS4voLaDOMFf2lPREWDcd0f/115vz5gGrna4uZ2JCGRzUqG9AHR3FrAaBTJs2zZk1K9vT4we3Q+AFjevuFuj39tvOSy/Zc+aIW+/d6+bMIDV60EDsH2bFHSCii4dvzPUefoiNDNiDaUEnKfNiPPssXKJMyc3W0dHG++4TzyTNm6ft3Vv3+us6YsN9+6xXXhGFFcTP3LkJhANPzmSgojlsOKKl6mozmVQoe+g+zEkuUDy0geAOtoBY6oMPxPMIEI3Xm/T7WxBLhEJZbJ3fPxoOn0PBee9VV+15/nkRNmD2huGbnIwMDFQODcW6uqCoqt9fgpBNUUyuobmPo2kuAxEqzSm1QsKFq2eUeTa5tMU5Z2pn8gClDh1K6Hqp1wuXYHu9QfD1n/5UdAHDqDBpUl2RpqXkqENo2VFVtYwyW3ZDg/L551L+EXJREKL+qLdAfZcuLR4bA1IgzprX359W1b+sWTN93bqGuXNLy8o8Xm9AUaI0OEy6Zt06jTodTWpkHo3HRzo6jhw8CJeApWqtrYs++miqYVjcoMNPhHKFSeSuAVG8VG4WomhJsFxcnMvxDsM9eqnzaSyVOlRZeQIX0CYmHH7QEfpDGQwI7iPTnAyFjh8bq4QQYZ8ez2gikV64sIY6zrT6eveZcS47pdOaLG+bPn36gw82zZqVhA+jJ7nGdu+OP/XU9x54oJx1gSuVhc5iuInubs4i+ELiUxEMSoilTzqJT8hKUtc77+g//rEGyODOTW4FF2nHhOaWiLiNmZyVUACs2ecLjI3lLMurql1r1+7p7YXixSsrT7r++gCxFgf0gBqHRCwNuQSDnbre8s//vOSMMw7v3v3pq682trerk5N/WrDg9I0b3Q7XRMLi56dJg7DDRi5Xv2DBKmoc9fHy4vHK/fsbMfiVVxogrZOTzuSkmkgMf/e7ZddcA1Oy33zTvOceUXOGBpWWKgjmYjG5pEQ+9VSF4BrrmT1/fo7L5Vy+445hGEskkg8eCi3H/AQXSIXjfFBXN2fz5pjfn4V3NQzQxoiiMChbP/+5uXWrzGUkcqqabXdZlrF58yyaPaK2zn372pubV553XkM+52pceqlz6BBXl
R3KocLsTTCNSy9VSkpEUA1UHx5GQCbTA3nu42SapmYybzc1nfrCCyEY5hNPyMBtfj4ZKknVDBH0QolWrlThUDIZ+y9/EfuRfzzcbbuj/gOB0txQe+wxU24iNk0/jJ+0F3GvHwfhcrEte/daW7bYBw+K7jB+4pFcPEIohITx7dudWbOgdZXBYOXq1ctXrz72ZNof/mC3tMj5J0GZDAG0ATz2U0/ZHEVxJx6VbETmmDuXgCa27W1oCHLDc1WVxadxIYrqQW6b9EcfWTt3ii2NRMTMuVILJserJbyk/i9+NJPJVr5UD0CqGRrKDQ1JU6ZYjz1mvfaaG1FR7sJ9vQBX9KglCd+qoVDs2WczgYD/8suVwtPP+IyPiz6Yp59mL+BQXwdXA/hpMYWSW25xnPWQatyiSkZVAVwYA5ciQiIj0M0/S89P6bLExQ87QoxAFV9RkecywzfeB5DvAOCEC9dy+clTXJRMmscf762osN97jxNAMmWtuaGOHxg59gwfh9rgEPhz9my1rg58QDhkRG0I9Pr6uNPDLbtTUdt9tIY4j0yVUYcYKxMBt0uOpivaAiMRzwMPYB/N228XqXaySbfdjBP02IxCJxqxGiGyfPufTKUWCg+hddz8xM/wcLcKV0y51YkKsFxfFVMsvDGBi0/8PDA/Ts6QiK+Af9xHwu0FhcesOJnIlxRaSQsNEUzx+Som9tz1wQ8CYT0ENFyXLTy64VJjtix6St19IQM/jFt4eQRXV0QHAL2vg9kmjyKTSNzOGq5T518acSw/BOullHXh7RFcHHOr5Nz8y+2D1MjCDY+FV2i47wMBMwUhYc3Kl/LcJ9ipE5UVSuwYp4RozTIXrrmJlkO0QuE+FCok94+9xIM7ABmqvF6NOx9l6i4v9Mm7/LPwYCQ1P4mpUDrbfV0Iv0Cm0OrI9+BIKN+Gw+9AEfeDqnOPGHdZ8vsq+EUEvG/s/1lkhVIGV/Z427EeFl/hBN4q1kTuNuFVsQKS0gk/lH/HCI5AvgqHxa5caU+4V921TJ4W3UCUyAvNcsyiaK84mX7s7SmYN+0JFxYKFbBj76UpvJSAuxLyD0WIjn2WciF7zmUh1nZq9nBZA8ZnRpiPhFz/wsE86RFPr9DI6lDnFWaruH1HuD3GpWfg+H0CrvYWGrXzXYPCVrlNstCwQFjFzS7ce+42b3AHOrdFFHqTOK3D5phvk/xm+xM35rlPYHF8zi18kAhDKaNjoWWBxUp/2twfBDlywyoDGNf36E0DtGBqBHD4LTH5J+Tcahj2il4OweIoPHMv5V/tw76EH993Ci25NILMas9NhPmUKOuCm/HHMihn6L4MgIsD4AwsbtwCk8m/u4UVjV9KxJ0eNsdbDBOEl06+yZ+vsmHPsERaKquYRE/EuVAsF/o8C8+A8PtaMJXCe1kKL77R8un7wmMM3JhFyOzQGtz+Ku7k5do6M9t8R4uL21Rt4WbJgmVKBUsu9M4zwufTyYWHzdhT8FuuZH5nEDec8ZYUmjgga36RRDb7fwQYAOkxP+Fzk1pGAAAAAElFTkSuQmCC')))
#print r
# test remind:
#r = c.remind.unread_count.get()
#print r
|
michaelliao/sinaweibopy
|
snspy.py
|
_guess_content_type
|
python
|
def _guess_content_type(url):
'''
Guess content type by url.
>>> _guess_content_type('http://test/A.HTML')
'text/html'
>>> _guess_content_type('http://test/a.jpg')
'image/jpeg'
>>> _guess_content_type('/path.txt/aaa')
'application/octet-stream'
'''
OCTET_STREAM = 'application/octet-stream'
n = url.rfind('.')
if n == -1:
return OCTET_STREAM
return mimetypes.types_map.get(url[n:].lower(), OCTET_STREAM)
|
Guess content type by url.
>>> _guess_content_type('http://test/A.HTML')
'text/html'
>>> _guess_content_type('http://test/a.jpg')
'image/jpeg'
>>> _guess_content_type('/path.txt/aaa')
'application/octet-stream'
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L136-L151
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.0.0'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for SNS API using OAuth 2. Require Python 2.6/2.7.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import urlparse
import gzip
import logging
import mimetypes
import collections
class JsonDict(dict):
    '''
    A dict whose keys are also reachable as attributes.

    >>> jd = JsonDict(a=1, b='test')
    >>> jd.a
    1
    >>> jd['b']
    'test'
    >>> jd.missing
    Traceback (most recent call last):
        ...
    AttributeError: 'JsonDict' object has no attribute 'missing'
    '''

    def __getattr__(self, attr):
        # Only called for names not found through normal lookup, so plain
        # dict methods are unaffected. Missing keys become AttributeError
        # to honor the attribute-access protocol.
        if attr in self:
            return self[attr]
        raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)

    def __setattr__(self, attr, value):
        # Attribute assignment writes straight into the mapping.
        self[attr] = value
class APIError(StandardError):
    '''
    Raised when the remote API answers with a JSON payload that indicates
    failure (carries error_code / error / request fields).
    '''

    def __init__(self, error_code, error, request):
        StandardError.__init__(self, error)
        # keep the structured fields so callers can branch on error_code
        self.error_code = error_code
        self.error = error
        self.request = request

    def __str__(self):
        return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
def _parse_json(s):
    '''
    Parse json string into JsonDict.
    >>> r = _parse_json(r'{"name":"Michael","score":95}')
    >>> r.name
    u'Michael'
    >>> r['score']
    95
    '''
    # json.loads hands object_hook each decoded JSON object as a plain dict,
    # so JsonDict can be built from it directly. The previous
    # `JsonDict(pairs.iteritems())` was a needless extra iteration and is
    # incompatible with Python 3 (dict has no iteritems there).
    return json.loads(s, object_hook=lambda pairs: JsonDict(pairs))
def _encode_params(**kw):
    '''
    Do url-encode parameters
    >>> _encode_params(a=1, b='R&D')
    'a=1&b=R%26D'
    >>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
    'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
    '''
    def _encode(L, k, v):
        # Append 'key=quoted-value' pairs to L, dispatching on the value type.
        if isinstance(v, unicode):
            # unicode text: encode as UTF-8 before percent-quoting
            L.append('%s=%s' % (k, urllib.quote(v.encode('utf-8'))))
        elif isinstance(v, str):
            # byte string: quote as-is (checked after unicode on purpose)
            L.append('%s=%s' % (k, urllib.quote(v)))
        elif isinstance(v, collections.Iterable):
            # list/tuple/...: repeat the key once per element (recurses so
            # each element is itself type-dispatched)
            for x in v:
                _encode(L, k, x)
        else:
            # numbers and other scalars: stringify, then quote
            L.append('%s=%s' % (k, urllib.quote(str(v))))
    args = []
    for k, v in kw.iteritems():
        _encode(args, k, v)
    # NOTE: iteration order of kw is arbitrary; parameter order in the
    # resulting query string is therefore unspecified.
    return '&'.join(args)
def _encode_multipart(**kw):
    '''
    Build a multipart/form-data request body.

    Values that expose a read() method are serialized as file uploads (the
    MIME type is guessed from their .name attribute); all other values are
    sent as plain form fields.

    Returns a (body, boundary) tuple.
    '''
    # NOTE(review): the boundary is derived from the current time, not
    # actually random as the original comment claimed.
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    for k, v in kw.iteritems():
        data.append('--%s' % boundary)
        if hasattr(v, 'read'):
            # file-like object:
            filename = getattr(v, 'name', '')
            content = v.read()
            # the real filename is deliberately hidden; it is used only to
            # guess the Content-Type below
            data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
            data.append('Content-Length: %d' % len(content))
            data.append('Content-Type: %s\r\n' % _guess_content_type(filename))
            data.append(content)
        else:
            # plain field; unicode values are sent UTF-8 encoded
            data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
            data.append(v.encode('utf-8') if isinstance(v, unicode) else v)
    data.append('--%s--\r\n' % boundary)
    return '\r\n'.join(data), boundary
# HTTP verbs understood by _http(); _HTTP_UPLOAD selects a multipart POST body.
_HTTP_GET = 'GET'
_HTTP_POST = 'POST'
_HTTP_UPLOAD = 'UPLOAD'
def _read_http_body(http_obj):
using_gzip = http_obj.headers.get('Content-Encoding', '') == 'gzip'
body = http_obj.read()
if using_gzip:
gzipper = gzip.GzipFile(fileobj=StringIO(body))
fcontent = gzipper.read()
gzipper.close()
return fcontent
return body
def _http(method, url, headers=None, **kw):
    '''
    Send an HTTP request and return the (possibly gzip-inflated) response body.

    method:  one of _HTTP_GET, _HTTP_POST or _HTTP_UPLOAD; UPLOAD sends a
             multipart/form-data POST.
    url:     target url without query string.
    headers: optional dict of extra request headers.
    kw:      request parameters; for GET they go into the query string,
             otherwise into the request body.
    '''
    boundary = None
    # Use the module constants consistently (the old code mixed literal
    # 'GET'/'UPLOAD' strings with the _HTTP_* constants).
    if method == _HTTP_UPLOAD:
        params, boundary = _encode_multipart(**kw)
    else:
        params = _encode_params(**kw)
    http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url
    # GET carries parameters in the query string only; POST/UPLOAD in the body
    http_body = None if method == _HTTP_GET else params
    # Ordinary request tracing is not an error condition; the old code
    # logged every request at ERROR level.
    logging.info('%s: %s' % (method, http_url))
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Accept-Encoding', 'gzip')
    if headers:
        for k, v in headers.iteritems():
            req.add_header(k, v)
    if boundary:
        req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    # (removed a no-op try/finally: pass wrapper)
    resp = urllib2.urlopen(req, timeout=5)
    return _read_http_body(resp)
class SNSMixin(object):
    # Base class for the platform-specific OAuth2 mixins (SinaWeiboMixin,
    # QQMixin). Stores the OAuth2 client credentials; subclasses must
    # implement _prepare_api().
    def __init__(self, app_key, app_secret, redirect_uri):
        self._client_id = app_key
        self._client_secret = app_secret
        self._redirect_uri = redirect_uri
    def _prepare_api(self, method, path, access_token, **kw):
        # Subclass hook: must return a (method, url, headers, params) tuple
        # describing how to perform the API call.
        raise StandardError('Subclass must implement \'_prepare_api\' method.')
    def on_http_error(self, e):
        # Translate an HTTPError whose body is a JSON error payload into an
        # APIError; anything else is re-raised unchanged.
        try:
            r = _parse_json(_read_http_body(e))
        except:
            # best-effort: the error body may not be JSON at all
            r = None
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        raise e
class SinaWeiboMixin(SNSMixin):
    '''Sina Weibo (api.weibo.com) flavor of the OAuth2 SNSMixin.'''

    def get_authorize_url(self, redirect_uri, **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://api.weibo.com/oauth2/authorize?%s' % \
            _encode_params(client_id=self._client_id,
                           response_type=response_type,
                           redirect_uri=redirect, **kw)

    def _prepare_api(self, method, path, access_token, **kw):
        '''
        Return (method, url, headers, params) for a Weibo API call.
        '''
        headers = None
        if access_token:
            headers = {'Authorization': 'OAuth2 %s' % access_token}
        if '/remind/' in path:
            # sina remind api lives on a different host:
            return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
        # Use the module constants consistently instead of literal strings.
        if method == _HTTP_POST and 'pic' in kw:
            # a 'pic' parameter means a file upload -> multipart POST:
            return _HTTP_UPLOAD, 'https://api.weibo.com/2/%s.json' % path, headers, kw
        return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw

    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http(_HTTP_POST, 'https://api.weibo.com/oauth2/access_token',
                          client_id=self._client_id, client_secret=self._client_secret,
                          redirect_uri=redirect, code=code, grant_type='authorization_code')
        r = _parse_json(resp_text)
        current = int(time.time())
        expires = r.expires_in + current
        # Weibo may also send 'remind_in'; honor whichever deadline is sooner.
        remind_in = r.get('remind_in', None)
        if remind_in:
            rtime = int(remind_in) + current
            if rtime < expires:
                expires = rtime
        return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))

    def parse_signed_request(self, signed_request):
        '''
        parse signed request when using in-site app.
        Returns:
            dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
            or None if parse failed.
        '''
        def _b64_normalize(s):
            # Restore standard base64: map URL-safe chars back and pad to a
            # multiple of 4. Bug fix: the old '=' * (4 - len(s) % 4) appended
            # FOUR '=' when the length was already a multiple of 4.
            appendix = '=' * (-len(s) % 4)
            return s.replace('-', '+').replace('_', '/') + appendix
        sr = str(signed_request)
        # NOTE(review): this logs token material; consider downgrading/removing.
        logging.info('parse signed request: %s' % sr)
        enc_sig, enc_payload = sr.split('.', 1)
        sig = base64.b64decode(_b64_normalize(enc_sig))
        data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
        if data['algorithm'] != u'HMAC-SHA256':
            return None
        # Bug fix: the credential attribute set by SNSMixin.__init__ is
        # _client_secret; the old self.client_secret raised AttributeError,
        # so this method could never succeed.
        expected_sig = hmac.new(self._client_secret, enc_payload, hashlib.sha256).digest()
        if expected_sig == sig:
            data.user_id = data.uid = data.get('user_id', None)
            data.access_token = data.get('oauth_token', None)
            expires = data.get('expires', None)
            if expires:
                data.expires = data.expires_in = time.time() + expires
            return data
        return None
class QQMixin(SNSMixin):
    # Tencent QQ (graph.qq.com) flavor of the OAuth2 SNSMixin. Unlike Sina
    # Weibo, QQ returns tokens as urlencoded text rather than JSON, and the
    # access token is passed as an ordinary request parameter.
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
            _encode_params(client_id=self._client_id,
                           response_type=response_type,
                           redirect_uri=redirect, **kw)
    def _prepare_api(self, method, path, access_token, **kw):
        # QQ expects the token and app key as plain parameters, so no
        # Authorization header is needed (headers slot is None).
        kw['access_token'] = access_token
        kw['oauth_consumer_key'] = self._client_id
        return method, 'https://graph.qq.com/%s' % path, None, kw
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                          client_id=self._client_id, client_secret=self._client_secret,
                          redirect_uri=redirect, code=code, grant_type='authorization_code')
        return self._parse_access_token(resp_text)
    def refresh_access_token(self, refresh_token, redirect_uri=None):
        '''
        Refresh access token.
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                          refresh_token=refresh_token,
                          client_id=self._client_id, client_secret=self._client_secret,
                          redirect_uri=redirect, grant_type='refresh_token')
        return self._parse_access_token(resp_text)
    # FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
    def _parse_access_token(self, resp_text):
        # Parse the urlencoded token response, e.g.
        # 'access_token=abcxyz&expires_in=123000&other=true'.
        # 'expires_in' (a duration) is converted to an absolute epoch time.
        r = self._qs2dict(resp_text)
        access_token = r.pop('access_token')
        expires = time.time() + float(r.pop('expires_in'))
        return JsonDict(access_token=access_token, expires=expires, **r)
    def _qs2dict(self, text):
        # Parse an x-www-form-urlencoded string into a flat dict, keeping
        # only the first value of each key.
        qs = urlparse.parse_qs(text)
        return dict(((k, v[0]) for k, v in qs.iteritems()))
    def get_openid(self, access_token):
        # Resolve the user's OpenID for the given access token.
        resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
        r = self._qs2dict(resp_text)
        return r['openid']
class APIClient(object):
    '''
    API client using synchronized invocation.

    Wraps a platform mixin (SinaWeiboMixin, QQMixin) and exposes the remote
    REST API through attribute chaining, e.g.
    ``client.statuses.home_timeline.get(count=10)``.
    '''

    def __init__(self, mixin, app_key, app_secret, redirect_uri='', access_token='', expires=0.0):
        self._mixin = mixin(app_key, app_secret, redirect_uri)
        self._access_token = str(access_token)
        self._expires = expires

    def set_access_token(self, access_token, expires):
        self._access_token = str(access_token)
        self._expires = float(expires)

    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        return self._mixin.get_authorize_url(redirect_uri or self._mixin._redirect_uri, **kw)

    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict:
        {
            "access_token": "your-access-token",
            "expires": 12345678, # represented using standard unix-epoch-time
            "uid": 1234 # other fields
        }
        '''
        r = self._mixin.request_access_token(code, redirect_uri)
        self._access_token = r.access_token
        return r

    def refresh_token(self, refresh_token):
        '''
        Refresh the access token via the underlying mixin and remember it.
        '''
        # Bug fix: the old body referenced self.auth_url / self.client_id /
        # self._parse_access_token, none of which exist on APIClient (via
        # __getattr__ they resolved to _Callable objects), so this method
        # could never work. Delegate to the mixin, which owns the platform's
        # token endpoint (e.g. QQMixin.refresh_access_token).
        r = self._mixin.refresh_access_token(refresh_token)
        self._access_token = r.access_token
        return r

    def is_expires(self):
        # Bug fix: the old body read self.access_token / self.expires, which
        # do not exist (the attributes are _access_token / _expires); through
        # __getattr__ it ended up comparing time.time() with a _Callable.
        return not self._access_token or time.time() > self._expires

    def call_api(self, http_method, http_path, **kw):
        # Ask the mixin how to shape the request, perform it, and decode the
        # JSON reply; JSON-level failures are surfaced as APIError.
        method, the_url, headers, params = self._mixin._prepare_api(http_method, http_path, self._access_token, **kw)
        logging.debug('Call API: %s: %s' % (method, the_url))
        try:
            resp = _http(method, the_url, headers, **params)
        except urllib2.HTTPError as e:
            # 'as' form is valid on Python 2.6+ (the file's stated floor)
            return self._mixin.on_http_error(e)
        r = _parse_json(resp)
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        return r

    def __getattr__(self, attr):
        # Names the mixin provides are forwarded; anything else starts an
        # API path chain (client.statuses.update -> _Callable).
        if hasattr(self._mixin, attr):
            return getattr(self._mixin, attr)
        return _Callable(self, attr)
class _Executable(object):
def __init__(self, client, method, path):
self._client = client
self._method = method
self._path = path
def __call__(self, **kw):
return self._client.call_api(self._method, self._path, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, client, name):
self._client = client
self._name = name
def __getattr__(self, attr):
if attr == 'get':
return _Executable(self._client, 'GET', self._name)
if attr == 'post':
return _Executable(self._client, 'POST', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._client, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
if __name__ == '__main__':
#import doctest
#doctest.testmod()
APP_KEY = '???'
APP_SECRET = '???'
access_token = '???'
expires = 1393739173.5
#c = APIClient(QQMixin, APP_KEY, APP_SECRET, 'http://www.liaoxuefeng.com/auth/callback', access_token, expires)
#print c.get_openid(access_token)
#r = c.user.get_user_info.get(openid=openid)
# test get:
#r = c.statuses.home_timeline.get(count=10)
#print r
# test post:
#r = c.statuses.update.post(status=u'测试http post')
#print r
# test upload:
#r = c.statuses.upload.post(status=u'测试upload pic', pic=StringIO(base64.b64decode('iVBORw0KGgoAAAANSUhEUgAAAFAAAABQCAIAAAABc2X6AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAALPBJREFUeNqEfAmYVOWV9t1qr67qpXql6W6gAdmVZgfFFXEnGjXBKGPUmDijj8vEuCVqYsaMPnGNjtskOo8LiCbRRBExSDKKiiIq0DR023s3vVVvtd/1f79zbhVO/sz/19MPdN+697vfd75z3vOe5V45u2KFZNuSosi5nOT1SrouGYbk8Uiq6gQC+BPH8YtsmjhHfGtZ+NbxenHE8XjEVaoqmaas6/hT0jQHf+JMHMGw9JWED67CgPSVjN8VxZFl2TAwDr4Sg8uyOM228R++EjPBNBxHnOA4+EaMJv6TxUHcF5fgh77CTfGPQ5eLC2lkcRxn4iuMiani1vjdMBRxP8tSMN1QyL3A55PpbDmdlnGNzyeuxBrwwbe4El9hRFqVGC6bxWyEdGi1Mi1exrcYDQLC+RAEBsE8cByrxSzpRrhESaXE1DEViMPnc/x+sSj8sBwxGk1dpsWI2ePygoBwC/zgND5HUZRcTmYx0TrdO7K88C8JXcNBiYWHDwQP4eECHMS42IFQSOb9z2SwPLF4zJ7W6c4Di8QRWRZ7jtFxHOPiIL7FUJmMWAaLD5MjGbMeyVghJoGDuJy2DpeLcfAnzsEEcBf8jgt5rpAsjrPK4E+6o3uQhCvmBnlB3Hwj2nbWCPGD8UkjSAPxwzqD0bEw0mcxBEbExYYhp1JilrxRuFjThALTOa5S4BxMggbBjYUV4H6YE/7lGeOHF4ARsDB88C9Mg/cnPwc2LneikAj+py0VF/Iy+O7Yf1yO33EVzuS74Ka4tSJ0VgxApuSQyFwtowkr7nZDGbJZh7/je2MZJEJsslNUBGUT68cmk9U5pA5sjQUR4Bcb36bTWIaSSGBMVg2HrMAdGUdwR5qB2BysnzUfIuPVYvN5nZgSjmMxLGX+kOkKrQkGZVYEljt9xPkYHydDNxkL+CtIljECNixugyuxBllWsL34Ipt1zQBanUzCkoW2YDjMyTSV0VEX3khPXGvBPNjkfD6vBoUWuOJuC0+lYLrYB2wdxIezYMY4wurHukpw5d6OJirlcVHNZj0AC0iWrFSsgUGHAYUVmJbEB4+pNE0PC5FIHApLnaFInMTQB9vA4iGFcBjXeHRdy2TMbFYjDHN4W2DwZLcMsFiAYpp2IvF5KtWRyWDlEi0Sx2W2TMJqcT62iFRDJvx04YcBgj9QK8IwiffKcVRFycryYV1PY2QWDYsPy8NN2ZuwkuNesGRWfl4UCx3yxWmyrLnGAIHhB9JimyFFFYaqKFhkh2XtmDkzUF8/59Ch43t7GUvcW7KzwcohfkX5fVmZ8rOf1Xg84Ycfjg0MWDgOqWHSsC5gPuMCBABpYt9wI+wn9hDX0hqE4mJAlg5hIW4BA5vIZl9avLjirLOOdnU1bd0akGWbUZC8hkyKiam6fo5MVxwk2xS/E2KzMDXXBlj2vC3kaYTWAaINw0yndy1ZcuZjj02NRgePHk1fcUUoHreKizGizB6P9FlxnOFcLvid72y44AKMm2hpsZ55BgriEDyydblGhbuw5fO/hM9iWxiZyXmw8Qv/GQppivJRKLTm7rtPaGgAHKVaW6UPPpCCQd46eXLS1QUWPfsXfOX3y2TJvIWuMgrQwqlEJ8QX7G+YcuB3Qp20ac46+eRp0ShsYkp1tX/2bCmTASaJZWAbgkGvZXlzOSiuR1Vrq6tZkEX4Cj+WBTX1ZjIQu8K6Q5h3jBtgZrBqGA5tgrgvpgjgIN4iZplO29lscMaMOQ0NYvttO6Dr0GFhzMAaIkXCG2HNjNW0Kok8uUOI694lb3euwzzmrFn12bTIlsKyXN3cDE8idCWbVQ8fFvPDh
HB+LvcXRRmvra2amCjN5ToMI8TLkKTOcDiFBet6Ly4MBAABc2QZN7fZ1UP2BAHsHoWd094qRIkESvGHlBynlU9MZHM5P1YFBO3vZ67i+kUiSA6ZpOAUeZRiSxHnY3BmVhgZFirxSTgXaoxfgkHiHTJwX8XsgczB4JS//tV4/nnvwoXSSy+pw8M2Rvf5sHvbwuHEnXcuXbYsPT7e09rq9fuXLF/OUy3btKlzxoyxoaGy6dMD4XA6lxvfsiW8bRu7pWMcEHMiIxRGjlky6kBdyaTZssCfAt3d40ePFjc0OOSB2dbYoTL0sOG4rpugR6yFEZ4FhyPkO4UTFz4N/zJIwPt5PKosq6lUXNctyAZQ7ziT994LRbJNMwhAsm0tl4uYpm/dunVnnSWQoaxs3owZ0jc+RT7fyaed9s0j1uuvm6xyRBgdJoYs6Dx7FfNjZ8FcQFHgjzAfr65DdhIWjIlZllrg5/jHNC0akMksVmFHoxhHwbDM+XEv5t7kRDXmDxCk62CwQmhpLvdGWZm9enWktNQ2DBuKXVuL45rXa6VSmcHBXCoV6+yc295uHD6s1tVJ5E6djg65pkaKRIRMW1udo0fFPY4ckVIp58sv7S++kBmZMAMYHvtP8nNeQkqm8ZbjWMTDvB6PbpoDAELLata0GDktJxr9dMUKfdcuaN9oOFwiyzMmJsp9PtDUVCYTgGu0LIOdHFE9l7GQAQs9gvHnFi2CbGzGNFyQyWi2vXnGjBm//vWy6dOl//2TlKTczTdH33/frq0VwK5p6tdft6xfP+3BB+E2jI0bnb17MT/hjaC3uH006hTILXNSUjZvMNg8Nvbl1Knlth0dHa00jCpsrNe717L2zJ5dvHIltlcpKzv/e9/z0rZPGEZXZ+fk2FhJRQW29+iePXNffvnLVKp/xYq6gYHj9u2rg2IGAg5zCiKeQncYzDweTSKyjmkx/YC8rXh8Vn19E68Wm3P4sJRMCkoEzccWjY9L06bJVVXheDzc34+vpI4ON3iQ5a6BgalMeKJRH9QP1hoIpMDPsG+67ocJ+XymQAgK5jwenLN7cnLvpk3rr7wSujrY07O/s7P/nXfmffBByw9+cOlNN5XnuVrhE/V4Fs6cWfhzYWPj8KFDdmnp1TfdNGGaB957L/7444t6ehxGZgpXZAoE2EVpTJiY2bsuKxKZv3evfsstSjKpdHVh/XBCKuSkaRYUAUbu9do1NfrQ0N/S6cSsWeXJZGxiwjSMIUUZmT8/RPrTfe65n8XjxYnECFRx7lyoIzStaHLyhD17ai3LJgOGWg7r+v4LL/zhrbcS1ZCmV1VJS5emgHznnnt+W1txV5cE9eEAq0AeoZnYA0wVKgndOXiweNu206ZMsZuaSk466cT16/sWLRq56qrynh4zEhG0HMZM8YmwXAHdK1ZwqCHGgmnRL1ByJ5GAxx+Gp/Z6cWhEVaFaUbhlTYNfnZrLvTRnTsntt8+fOzeXTPa3teV0PVBcfPzChVFyfdBXuKUEPKrjVCP2IG6clqSBP/95yl13AQ6wA4DXLxXF+s//bAL+4zM6au/Z47S1OXv22IcPA3Vs6NSUKfCFRizm3H9/CE5+bMy4+Wb700/lfHwvYlIgvKZh8MyTT1YuW4Zv4ps3h3/xC/gXh3bRIafNYY/GvEximgZF55AILKKkZI9tf3reeTXFxWnLKp01y6coR/r6TJ8vqCjGc8/Vrl9/yoknkgsqm11f/3eKB0Us0vWiAh+ig8FcbjpEwz4fik2gWlJaKr7r7tavvdbp7hbGhqs0DbAiDKqtTbir9vbm115bev319q5d9ocfKgjgYIkADswWjJAwP5ROd73wQvGyZdCm8Nq19qOPqkSNhEoXcEtVNTfC4gQFc24iW/bkZO+JJ37/rruC/wixUul0xR/+YM2eLYOEDA7CPcK8nYkJzFvoDyKqRCIxNPT+CSfMOvvs2J//XOH3gzDZgO7OTqAXB2SQtxkIKKSr1q5dAHmFeL+gmeSQO
X52gNi5nNrdLXbi7bflQrSYT2IwHYYmal1dExMTFdGot7TUCIfloSGHWQbUlkHbNDUHrBi35/iOgzhavK1pq+Nxz5EjDm5z8KBwITU1NuQNJSkr8731ltPVZd98M+gHpGuRDssUQioUmgCuPqyoqNi4sXHu3PHPPpMfekjhoBobywCJe+n6aElJXVGRMAF4Lw65OZwklyEIAuIKREuyXLJ3764bb1z5+ecKEJgNGD4mT+yYFIb6+8e6uipgIIDeykp1cFBYK+ZPkKGAdUEowAA3pcJMIK/h0JmqlhbpW9+CmO3x8aQqUjIBgC3moetqURF2JqeqLTBFWa60LOi5qQOI/TrOMc2saaanTVu3YAHQKLhoUQu0VJZLdb0sFLI40gIEYjH19cVMd0dGWMMFQ6CghzM4bqrEsj6urKy/5prJeLzkwAFBVGC37Ns4dHPJjZUYG3OzYlOnSvv2sZ9nhiMUB/vorjOfJXSTJqaJZezz+fZNm1ai6+Pz52NmAXCSwUGs3O/1nnjoUNH4+IuzZ1d8+9vFweDhw4eT7e0mNrakBDupYjaBQN1xxzH2qqtW9T7yiAhlBwcb//SnmSCnnF5T1Zpk0kSAoaq56mpEBWYhCwn7hBqTumE/DcsqmT9/1aJFyQ0bnH37lEgE5ByqhzuKHaJUFDQrRUrhgggznJISERdxSpDMOJ8WoPDNNQmPBxcnbHvvOeece8cdcKEgycWUB00SGuEn9+KLh371qyX/9m/L2V2ffbZBx9V/ZPBBv//0s87i34ePP9669lqAjUBO3KunZzQeD1VUpDdsSLz5ZvXkpMQGrOs5+Aioqq6XOM5exzFmzRJmu3RpHBoxMTGsaQHbrkZEAXzGymFZ4+O7Z806M0+WACLMqzm0FgSb8Fhjgi5TgtNhBktmCVxZXVdXxQhBQb8ciRT19Qnxl5WBzTdqmgfEA66CoksP+8lEQsgVR8bH5TlzJLJP8aFw3+npKXvxRZNTP8T7g5OTOqhLRUXZokX777577xNPZCAIjwdcORmLeY87LhSJmD09ucrKiyjMDk6ffvCnP21ubq6YPx+M8OBXX012dOhVVaGiolQ2O2/DhtqyMveOmDNkxxlyTsJxJifX1MSIJ3PymakfzAYENRxWsR5gL6yfw06E/sBbICfYiM8nyGF5uQKggto4DhQVTJvxQx0fH1u40PP008VglJ9/bv7kJyI0ZTyHEDEJSvebyeTgbbc1bNrkTnJiwoYcECHJMsKPoPT/+WAZKccJyvLf07HOTuPCC7EcOxxWKMHs8hbE7ZwowO0ByzAGBUoF+4a/cpzheHxnJlPp9cK6gDSYHPSncmysFlSxpMSmkAXeImGacRJQSNfLPR6LoTUYNL76avjAgeLVq+19++BInfJyptwS5dwcgi6vpkWef37ilFOiiEAkqRYeS/CGOJslollBXcE9/pcPjDKcz93kKX4Szty6/36O/sFeJIqohYjJE1ECgFMEFHw7lL+FdU0mk1uamlbfdVcsFOru6PAFAl7gh2kebm9v27WraffuCKEdrPqVlSsrTjkFCmanUnXbty8EtgOKVbU0lRr+7DNn9WoTZyKCpYSDm6ZHlELhG1h+5OhR/YYbzMsugxN29u+3cQnoOi6Bvxwbwx2lxkb59ts9jY3urn7yifn444KBlZbifBiasmIFOKZz4AACNbu5WSgzbIrs9FgVie1IZBdOOMH1e9BY3mqv12uaB2U59dRTyxcv/r/lOuk4I7/4Rd2WLdjVRCbTf++98y65hL/qGRoaBt/88kvYoZ1IvH/OOWf++79nP/tMuvZaQLdEiV64Ys68yJRSFd6IQn8Old2MfyEBBG3KZodmzvQ9/3wZItbWVuOKK4AUCrErcW0eDmRKFUqUSHCzGlxgoDQzB4IineBSFo7+OcMCNTCMkKYVU/bQaW42vvMd/VvfMs87L3PTTdnJyYgsT7nkEiMUghUUBQIznnwy9+677AqnVlTUP/TQeH29kkpZs
AXskiR5Z8zADkssZiwMi+TYm1KzNrloYdjQAk6AcrTAjgRWVlSU7uvrRUxGAZaA93AY/gaeT8RAsZigorAFXI5dZQrJiVcSAaevZfbDEB/vtUtuOWgEj4Vnsu0kVAuY3dpqf/651NZm9/RYf/xjy+9/L3A+FlNIZwBgCKqkW2/Vb7vN/uQTmFBZNBqZO9cCGzdNdXISJEwFf8zDNQSKgFGQJNgnthDRMkCFyijMw9wEPWUmxL8IhnV9EhtISqFStUBsCYUEEhUxZfa0nF0gSuNwNh8qQF6Dq0i8QAqOyUdzOa+QlFZxJ9Ir7IAPQ+N+ppkKhztHRo6n0pfMaYpsVsNuYBKvvWZu22ZMmwbuocEUsUjK3QMeMJQMLOScA1dYqPjqOkLeSSqUCv4A1OFAlRGOYg+9uDgGzMtXT8RSsU8Yjes+zMnyjocrrG5QBIlAFpwJpmys5u5qnsRyhh07HMCNiabZ06Z9HA7rhmEZxoEZM05kCjE6KjE1VZRWy3p/8WKA7KpDh6IdHUZHh0jxOU42nR4MBlXKIkh0V4fqYO4iOf/GWXgchEC5hkxhENefXeaIP4NBT77IaAOloB1YJ3QYCsJlDXalXL6lfLObtMqvnxNggnhw2CBuwCIh08JAHlVNjIwI7Z0xw/u73/FufLuurhrWArb8H//BqUJcdWDRorOfew5xf+dvflP1zDOCq6gqaMDWaHT2RRdhgxxELfDVVEPmormb4uEMZj4NLnOVjFRA5iIwxI0jmcyY1zubUtZusoaTcPlcMrNjLvS6dUnO+/K3hXNIrTQuz8L9OsxLqNwKidqQIn0Fs1k8b94xX79nj7V1q7Vzp0zfArcWKUqFz+eXpMCNN4Lif/7WW4GOjq/mzVt4ww0rCeTt554T1l5U5JZUITuGEC585/OPvL0y7xXmQFYHu8hms8bs2cWkgyIa5awN+1UWIimOW5SlcTgbyRkrsZHsDjFhkeLhJgLWMciPS6SKglChGDtD8zOfesppbZVBuQA5CFayWYUywOJPx6nft0+/4w7rttuCkcipl102dvHF7W1tG2tryyl9aW/bZr3/voAArqRzYZmXyrUCTqBCV0VKMScVnA1H6Y6jg3XV1jKXElIG/gEyKDx0CK5YNVwTABJh/9i8TZE+c4N8qkXLvGC3/wHaz2UXKtIinp62fbve2Kh9+aW1ZYtLG3AmdAmRLVclScfg+jxvvKE3N6v/9E+eNWtKYrGmuXPF7NJpe/t28+GHJcTiHPGza4XOM3Un8HTrspg0a2NRkc37TIYnLN2ywqwCJCaBf4yXnIvmFgyOscgKXDQtZDU49Y1vISCBYUuWHGsWwd9UnhZngCHiCJMVcpXcViCzBnIhCmpJhSIxOY6ra2rkVauU0lIJDgm60NEhc0GETQgLwDZy1ZsU262SE8Zw94lbNyzUk+H2stmB+fNjzz7rR1j/wgvWAw84kYjQBWIybL2FLLcb+gLtqSrKGQXhHbAE5jkieKA5udUAyqE6XBkulAKIeIrcNWdeuHJDkbrLZnA+x9m4DZX8hRuA0RJndiN19hlYMDmzYwVrBhUemf/lrxhyGXUsy25sVKqrRTafIz4GV272ALCBn1DngRAWnwBBUIcNC4V7FACBGjtesdp8Rccme+CCrTgCeMQacAHrTN7MZG5/4FpOvtjPGyjT1slcx4DLYZ+XL9ZxZdQdX3HbiDj5xBlGt9pC0rTZu0D1WludlhaZoE5iG+TMDrWUuDyEO1LYvTOSse4w6yL37na7CDfIlXEuZ0FAWAYLEpeRknDmhTsoRJ2S7800gCkrlefc6fK8WcysXVTFc4WIO2IlRJjcdDFmxj6TTFpLpYCLOiu85hbAuP2DK72cpkIYipjF5tAqrxSCZlEOx3XR+Xoi02fFtexvJPFEeZZVNJ/owso9qZRX132plMqoxqXzAlfhlRPpd3t/uN7LPWgse2a5+ayowq0j3H0EzMd68t1gqmke9Xq/xDncyEBe1+1ZY
5SiCr4my722PYH94JmzLTBHpsqjW+7G/nM/BfXukB+nsqhbrWYZc48QMXVRcVOUfkl6LRJ5OxaLY1ZseHmJSoUuAyrYs1IoVAd1G3BgJsGg8O34l4GaUhCuVpNCis3HnpOkMN2d8+YN/epXI2VlWt5rFHilRIVOKNVANvv6eef1XXopN1OohoGDGncAshkzx8aFVC5nRBRZL7e14xvdVEKlUylWMDWdTqbTfzj99BU33lhaXDzw2mvRJ59UuFqVTmvgzLpupdPQLom0DrLD1osSZiLhKTiYTAbOxmJVJPt0KLIRfQ2YPUJp7Jht+0jRhnK50PLlZ5x66tDgYOLWW/2RiM/rtRXFJA7vYRhLp3dVVW245ZY6TRt5993Q2FhKlicwH0mqDIV8imJREI5gxl0/Y5toamFl4JwLFdq4R8LhRAwk5zhHA4ElV1yxhEJw+/LLs1u3ygik8JXP15dM7mho8Mdi5QMDGN1AjFVWli0rQ3gERfCPjoaGh+1czvR6546PT3Mck2kQkIY3wetNZ7NvVFbmFi8uNgxfe7uezfZXVp528smi8nP22Tt6e3O9vYGBgfldXQ0IARznnVAoUVqa9PkqL7xwGlUkO2+5peWDD6RYLFJfDz1vPnhw+fbtJbBteErYAgImbqYAmopGJAJPh9K8Tj695lo1oY6laZW6rlKoyNlAlQMuny9jmn9ateqUn/8cYfBIPD4xNqZ5PEVR8fGRNWYMY3R0NJvJePz+iYMHh++6K5bNWpQAY7P0ZDLvVlVNfeSRE+fNg17Ek8mcaa4rLuZgEkNd+OMfI2RDEDO6bRuC0AFI/L77Tj3zzLAkhfNZnuXnn4+fAvmFIiSgxlu2CE5GYMQNkQ55R60QRjn5Crrb8EQQLQwvkykyTf13vzOWLBHs+fHHsXgRqchyIpM5/qKL5lRW4nAYgTh+vgljkuT3eEroW/Gpqhpoa7MefdRB+MECxewdpyEWmz5vnkop3mo4f2wIlKWqyh0JBO7jjysCgYp9+wyPp1qWa955x9/d7SxYIK1Y4Y4cj4taNCa0YAHCZq27u+jTTy0MRXrkdvcyvxAx/KJFMsVfBe/HqRYPba9BjXm6omTS6Wx1tWWaSjwehGGbZiAcVi0rs3hx8OKLlSlTHMxyaEgwhJ4e+9Ah4agbG+Vo1F682HPqqe7swUDvvFOmBrECbgvZL13qWbLEicdFum9gAORx5Pvfr/jRj+AR9Isuco4cEedR76hM+Aoyb2ra8BNP1J92mrN/v3nddfbYmNjGykqR8Qb35Ow8JysLvWnkqylrSTGXQ105TB6wno9M81B9va+42FZVMxTyVldrqqpFIqqmGfF4Lp0uHhlZ9be/FX/yiQH2A/FhhYh7uYOYegodqnQYAKGHH55y5pkiSQq6mu+LPqYL2If//m9p5064VDUfMPhefjm3fn0QLhZyhGZy9MM2STkZr2X5/+u/nKYm+fnntc5Os7RUXDs0BOAwoJUcPAKlKGBwb0fLFIkVmWpTrh9WFETCO/3+7muuOfPcc0so4vG6FYr/WUCUpOS998pbtvh4lvBDHMRzBYPQWA8E/NlsfOvW2Lp1PiDk1KkWtpdidO6jZb4Bt9fn9/+tutobDlcPDlaMjPgHB4euvDLr85nAfE2L+3wqgFrXU8GgAWsKBv3pdOXhw0Pnn99jGIPLl2NjVECjJNUNDZ2QTHrZ6XBXMhw++B9TMVF5yDeqiX3mIBEb1dS0ceNGX77iLo2MSORXBaqNjsrl5VCeUDIZGh8fkOU9mmZHo9ifDMQJbVfVMLWkVqjqcbatwEkcOTI8OFhbVaUMDQnkZErDvXPw8YaRzOXePP30k269tSoWGxkc7D58OIG9UtVASYkXhFyWcTwYDALtgqFQOBzWvF5A/HBvb39zc7SycvmCBUHqgMGwI2Nj8bvvrn3/fYvdLTfQcEc8tXPnVbzQsW/bps+3rLVV2rwZblP+4gvRjzMyolJ7s00eFWuwqquBdkNdX
VtOP33ZD35QU1WF2fj8ACkPdiwxOZnOZDKjo32PPjodNqaqKUBRVZW9e7fDSMFPFpDK4QeuaM26dQumivaQ8oYGbrr7f30wme7uymBw/rp1boMTfBgx3Gg4bIGS8wMLhYwH54AozNbYJDh16Pawwn+Cdfzyl5ACHE8SpquqI4D7bDZKA2XT6ZqWlqDPNxAMnn7bbfNrav5uPlVhchkzZmTXrbM/+cSvaRblAx1ACxf1/H4GDpEGDwSiPl/42Wetr7+GmgyEwwlFmd7fL8diImiDuvX3Cw/ywx96jjtOmP+OHdavf409ELAUCilAGY+nq6iooaFB4PCnnzpffy2VlbmRFi2KA08R8EUiGqe8Bd/iBwxI0UX/VzA4nkq9snJleVOTMT7unzYtUlraMTSUhIWUl0/s2rVo587GcNh+910Ebg5C3+ZmofYAFYizslKZPx/eQt28GfQL3CgxMSHuMjAg3B53k3MVmut4wAgAz29/K01M/HXNmrm33TZx3XWlHR0GxieYBWHsPXAg8OqrFaWl1tatTnu7VFIiUa+lNjDwvs/X+cMfxkABOztFKzHWwlEthRAiDUo1BhE5gClwFlcgGAyP66aUf4CiHC4tXXnPPU1U9fl7nVq+3NizB+TQ8/DDomPZstRCao7gEUegyQiqATbYKOCv6HdqbeUuAbe+g/vlch5FyUHZ4LRVdQJotGTJggUL+q64ovf++6FEIUXxEQTmRkfTQ0NYsCjuwWtQZdhQlMF0+utTTvn+1VeD/0488khWUfy6HvX7Teo7dCgUc7sPqRCrcSO9AC0uDkBslNYyvN7ZmYxoJYWc2tqcvj65oUFs0eio0tio7tqFKULVD3s8H9TXT0Fg3dtraBqYUy4cjhpGdTo9FaNjbUAax0FMI+rxY2PwMQ63fZPDgH/cqWmHV60qCwa9HR0DuVzTmjWYXuTyy/8WDCZ6epS+vsojR5YfPRpQVWicwNexMUDd24oyPG8eBJorLz9x0yaMkz733B2joxr8Uy435ZNPlnR2yoUUD5wu17REVnj5cs4bHHvGgqAcugHuobC7yoiP5fEEdB1DY+tUSqb16PofN2y44MYbAZK9vb3BaFTPZHyBQDaRGGptVd56a+mOHcGiIqCINH26Z2ICDpz7pdwEoq4POM6OG264+Oqr4fnGaXKx/1kNBBofTadDV17p2bfv6GOPHQfHdtllQ4bx8T33nHTxxUFg1T8CteFEwv/d7/r7+jjL7QaCFLpqLpRBjQs5awISLAoM9q2ysiKvN2Pb2WnT/EVFvqGhbCplRKOg8k3t7QOx2NnXXtsAc5KkCiAKP+GFT3W1NGvW5Nlnj91xR/CNNzzh8JednWWmWYOAibMwlE6Cgw1ns2doGteBY4OD0vAwLpQoteDs3Alo8FRU1Pf3m0NDIDMDL76IvUAEA5Jx1vbt8PBOWZkze7bMfa0gmAcPSjNn4szyDz80AHWU+udeFrG3lIoSjWlu4opCCplSJLZPtHL+Phar+c1vZlRXIwaKYcOpsyxHQp3o7i695JJ2zPXVV+tqaoBY5n33OfBYMK1Ewli/3vMv/wIal92/X73ssmHTfONHPzrPcaqeflpUjwtkG+jFrnLaNAGWnZ0AlZ5zzql8+OFAV5e+YYOoD5O7hiNMZbN/uuqqExcvrvzJT1Qx76ydzaqy3FdTo7z8cm1NjXHDDfb27SKLKBoyJ9zkFMfSnC2kjAUlNIiICGVjHCcGYqVSM0OhpoaGcp+vRtdB8WAJoXi8tK0tkExWDQyINuV0OkVFI7u9He7H6e+3Ozqk3t7UM88c/vBDQdFiMU8w2FFcfOrFF9dccEHW41EKCSAmt7gWU2luhshEo0QkEtq9O9PWJh05Iko5sZgSiXgRD1jWAUVpWLq0fu3aRHk55GLRozFqIBDu69OhGgjI9u7VwFJGR4Un42ZxzmPmiQYroMaNUBLXOOhhC0HBUin4g+NaWrA/UDALy4DzKC3FCm2gPEGlBoDVN
JNoqgVKRM95cCvZ19ksiMc8SiMjLC2NRIogzfp6u6lJ+vhjbmAudH4KPgC27PF4s9nJyckQFOPqqxEk+oAxhpGy7f2O015UlFy/fiNllEf+9V/f/e1vrWhUAyimUqHu7vArrySKigActZpWDeLElINbkzh5yFQcaxTEg5+N4JwWVbfcDDgIuuN0ffHFUVWtxz5kMs7gIHQhijAV4iBKHLCsMS64TZ++o7FxQtex7Qm/P7Ny5aXUlShaCTOZdCDgjI/XlJf76+qc3bvl/HNbDvUCup0Htv16JDJyxhnlmpbq64MOVx89OqOz880TTph66aVLFi2aCd9Opj5n7dqGtWuZXuWE8k7E29sRcpvr13/V1ja8efP83l5+cOBYYanwkKfo8cg3UotYh5vqqSnMa5p7ZPmjjRuPP+20vS0tMCGV2pjN3t7wwYNLmptjgDXD4IKbv7r6uBdekCndB/OvpMIXGLj10EOwfDD+qooKzo1y6wxnPwVeUDZT3Au3vuOOa844AxaYoQdERgxj5LrrztywYRb4Y6HjjnonA52dUnExNtAbjRZFo7XUxCA+S5cOrVmTvfxy38SEzFnXfGAoeDG0XOwwVWtlLrhwWyKhmW5ZbWvXXnvnnQIxC6E2fYay2e4XXww/+aQYgWEZYQr3iLqRVMp+7TXrnXfA8gCViJ99pF0KMUq3SkAQYlMHDNQ+WlIybdkyJd+DKg0MVH/4YQUY1S9/aeBGljXZ1OT/2c9ARazbb7cATmVlQGPQT6m8XJ47V7ngAplyBhW9vToljN0HR7hXlos7FEJpblcjd4fn2/AgeDOXq6ut9XPQumOHDRACZ8ASN22qKCuLXX116r33Ih9/HOrvl+hpF+vBB+3OTgCGCMQw9IEDwruQ9wun0xnQLEBjS4voLaDOMFf2lPREWDcd0f/115vz5gGrna4uZ2JCGRzUqG9AHR3FrAaBTJs2zZk1K9vT4we3Q+AFjevuFuj39tvOSy/Zc+aIW+/d6+bMIDV60EDsH2bFHSCii4dvzPUefoiNDNiDaUEnKfNiPPssXKJMyc3W0dHG++4TzyTNm6ft3Vv3+us6YsN9+6xXXhGFFcTP3LkJhANPzmSgojlsOKKl6mozmVQoe+g+zEkuUDy0geAOtoBY6oMPxPMIEI3Xm/T7WxBLhEJZbJ3fPxoOn0PBee9VV+15/nkRNmD2huGbnIwMDFQODcW6uqCoqt9fgpBNUUyuobmPo2kuAxEqzSm1QsKFq2eUeTa5tMU5Z2pn8gClDh1K6Hqp1wuXYHu9QfD1n/5UdAHDqDBpUl2RpqXkqENo2VFVtYwyW3ZDg/L551L+EXJREKL+qLdAfZcuLR4bA1IgzprX359W1b+sWTN93bqGuXNLy8o8Xm9AUaI0OEy6Zt06jTodTWpkHo3HRzo6jhw8CJeApWqtrYs++miqYVjcoMNPhHKFSeSuAVG8VG4WomhJsFxcnMvxDsM9eqnzaSyVOlRZeQIX0CYmHH7QEfpDGQwI7iPTnAyFjh8bq4QQYZ8ez2gikV64sIY6zrT6eveZcS47pdOaLG+bPn36gw82zZqVhA+jJ7nGdu+OP/XU9x54oJx1gSuVhc5iuInubs4i+ELiUxEMSoilTzqJT8hKUtc77+g//rEGyODOTW4FF2nHhOaWiLiNmZyVUACs2ecLjI3lLMurql1r1+7p7YXixSsrT7r++gCxFgf0gBqHRCwNuQSDnbre8s//vOSMMw7v3v3pq682trerk5N/WrDg9I0b3Q7XRMLi56dJg7DDRi5Xv2DBKmoc9fHy4vHK/fsbMfiVVxogrZOTzuSkmkgMf/e7ZddcA1Oy33zTvOceUXOGBpWWKgjmYjG5pEQ+9VSF4BrrmT1/fo7L5Vy+445hGEskkg8eCi3H/AQXSIXjfFBXN2fz5pjfn4V3NQzQxoiiMChbP/+5uXWrzGUkcqqabXdZlrF58yyaPaK2zn372pubV553XkM+52pceqlz6BBXl
R3KocLsTTCNSy9VSkpEUA1UHx5GQCbTA3nu42SapmYybzc1nfrCCyEY5hNPyMBtfj4ZKknVDBH0QolWrlThUDIZ+y9/EfuRfzzcbbuj/gOB0txQe+wxU24iNk0/jJ+0F3GvHwfhcrEte/daW7bYBw+K7jB+4pFcPEIohITx7dudWbOgdZXBYOXq1ctXrz72ZNof/mC3tMj5J0GZDAG0ATz2U0/ZHEVxJx6VbETmmDuXgCa27W1oCHLDc1WVxadxIYrqQW6b9EcfWTt3ii2NRMTMuVILJserJbyk/i9+NJPJVr5UD0CqGRrKDQ1JU6ZYjz1mvfaaG1FR7sJ9vQBX9KglCd+qoVDs2WczgYD/8suVwtPP+IyPiz6Yp59mL+BQXwdXA/hpMYWSW25xnPWQatyiSkZVAVwYA5ciQiIj0M0/S89P6bLExQ87QoxAFV9RkecywzfeB5DvAOCEC9dy+clTXJRMmscf762osN97jxNAMmWtuaGOHxg59gwfh9rgEPhz9my1rg58QDhkRG0I9Pr6uNPDLbtTUdt9tIY4j0yVUYcYKxMBt0uOpivaAiMRzwMPYB/N228XqXaySbfdjBP02IxCJxqxGiGyfPufTKUWCg+hddz8xM/wcLcKV0y51YkKsFxfFVMsvDGBi0/8PDA/Ts6QiK+Af9xHwu0FhcesOJnIlxRaSQsNEUzx+Som9tz1wQ8CYT0ENFyXLTy64VJjtix6St19IQM/jFt4eQRXV0QHAL2vg9kmjyKTSNzOGq5T518acSw/BOullHXh7RFcHHOr5Nz8y+2D1MjCDY+FV2i47wMBMwUhYc3Kl/LcJ9ipE5UVSuwYp4RozTIXrrmJlkO0QuE+FCok94+9xIM7ABmqvF6NOx9l6i4v9Mm7/LPwYCQ1P4mpUDrbfV0Iv0Cm0OrI9+BIKN+Gw+9AEfeDqnOPGHdZ8vsq+EUEvG/s/1lkhVIGV/Z427EeFl/hBN4q1kTuNuFVsQKS0gk/lH/HCI5AvgqHxa5caU+4V921TJ4W3UCUyAvNcsyiaK84mX7s7SmYN+0JFxYKFbBj76UpvJSAuxLyD0WIjn2WciF7zmUh1nZq9nBZA8ZnRpiPhFz/wsE86RFPr9DI6lDnFWaruH1HuD3GpWfg+H0CrvYWGrXzXYPCVrlNstCwQFjFzS7ce+42b3AHOrdFFHqTOK3D5phvk/xm+xM35rlPYHF8zi18kAhDKaNjoWWBxUp/2twfBDlywyoDGNf36E0DtGBqBHD4LTH5J+Tcahj2il4OweIoPHMv5V/tw76EH993Ci25NILMas9NhPmUKOuCm/HHMihn6L4MgIsD4AwsbtwCk8m/u4UVjV9KxJ0eNsdbDBOEl06+yZ+vsmHPsERaKquYRE/EuVAsF/o8C8+A8PtaMJXCe1kKL77R8un7wmMM3JhFyOzQGtz+Ku7k5do6M9t8R4uL21Rt4WbJgmVKBUsu9M4zwufTyYWHzdhT8FuuZH5nEDec8ZYUmjgga36RRDb7fwQYAOkxP+Fzk1pGAAAAAElFTkSuQmCC')))
#print r
# test remind:
#r = c.remind.unread_count.get()
#print r
|
michaelliao/sinaweibopy
|
snspy.py
|
_http
|
python
|
def _http(method, url, headers=None, **kw):
    '''
    Send an http request and return the response body text.

    method is 'GET', 'POST' or 'UPLOAD' (a POST with a multipart/form-data
    body). Keyword arguments become the query string (GET) or the request
    body (POST/UPLOAD). headers is an optional dict of extra request headers.
    '''
    boundary = None
    if method == _HTTP_UPLOAD:
        # consistency fix: compare against the module constants throughout
        # instead of mixing 'UPLOAD'/'GET' literals with _HTTP_GET.
        params, boundary = _encode_multipart(**kw)
    else:
        params = _encode_params(**kw)
    http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url
    http_body = None if method == _HTTP_GET else params
    # BUGFIX: this traced every request at ERROR level (logging.error);
    # it is ordinary diagnostics, so log at DEBUG.
    logging.debug('%s: %s' % (method, http_url))
    req = urllib2.Request(http_url, data=http_body)
    req.add_header('Accept-Encoding', 'gzip')
    if headers:
        for k, v in headers.iteritems():
            req.add_header(k, v)
    if boundary:
        req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
    # the old try/finally: pass wrapper did nothing -- removed.
    resp = urllib2.urlopen(req, timeout=5)
    return _read_http_body(resp)
|
Send http request and return response text.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L169-L193
|
[
"def _encode_params(**kw):\n '''\n Do url-encode parameters\n\n >>> _encode_params(a=1, b='R&D')\n 'a=1&b=R%26D'\n >>> _encode_params(a=u'\\u4e2d\\u6587', b=['A', 'B', 123])\n 'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'\n '''\n def _encode(L, k, v):\n if isinstance(v, unicode):\n L.append('%s=%s' % (k, urllib.quote(v.encode('utf-8'))))\n elif isinstance(v, str):\n L.append('%s=%s' % (k, urllib.quote(v)))\n elif isinstance(v, collections.Iterable):\n for x in v:\n _encode(L, k, x)\n else:\n L.append('%s=%s' % (k, urllib.quote(str(v))))\n args = []\n for k, v in kw.iteritems():\n _encode(args, k, v)\n return '&'.join(args)\n",
"def _encode_multipart(**kw):\n ' build a multipart/form-data body with randomly generated boundary '\n boundary = '----------%s' % hex(int(time.time() * 1000))\n data = []\n for k, v in kw.iteritems():\n data.append('--%s' % boundary)\n if hasattr(v, 'read'):\n # file-like object:\n filename = getattr(v, 'name', '')\n content = v.read()\n data.append('Content-Disposition: form-data; name=\"%s\"; filename=\"hidden\"' % k)\n data.append('Content-Length: %d' % len(content))\n data.append('Content-Type: %s\\r\\n' % _guess_content_type(filename))\n data.append(content)\n else:\n data.append('Content-Disposition: form-data; name=\"%s\"\\r\\n' % k)\n data.append(v.encode('utf-8') if isinstance(v, unicode) else v)\n data.append('--%s--\\r\\n' % boundary)\n return '\\r\\n'.join(data), boundary\n",
"def _read_http_body(http_obj):\n using_gzip = http_obj.headers.get('Content-Encoding', '') == 'gzip'\n body = http_obj.read()\n if using_gzip:\n gzipper = gzip.GzipFile(fileobj=StringIO(body))\n fcontent = gzipper.read()\n gzipper.close()\n return fcontent\n return body\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.0.0'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for SNS API using OAuth 2. Require Python 2.6/2.7.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import urlparse
import gzip
import logging
import mimetypes
import collections
class JsonDict(dict):
    '''
    A dict whose keys are also readable and writable as attributes.

    >>> jd = JsonDict(a=1, b='test')
    >>> jd.a
    1
    >>> jd.b
    'test'
    >>> jd['b']
    'test'
    >>> jd.c
    Traceback (most recent call last):
        ...
    AttributeError: 'JsonDict' object has no attribute 'c'
    >>> jd['c']
    Traceback (most recent call last):
        ...
    KeyError: 'c'
    '''

    def __getattr__(self, attr):
        # only reached for names not found through normal lookup;
        # missing keys surface as AttributeError, matching attribute semantics.
        if attr in self:
            return self[attr]
        raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)

    def __setattr__(self, attr, value):
        # every attribute assignment is stored as a dict entry.
        self[attr] = value
class APIError(StandardError):
    '''
    Raised when an API response carries a json payload indicating failure.

    Carries the platform's error_code, error message and the request that
    triggered it.
    '''

    def __init__(self, error_code, error, request):
        StandardError.__init__(self, error)
        self.error_code = error_code
        self.error = error
        self.request = request

    def __str__(self):
        return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
def _parse_json(s):
    '''
    Parse json string into JsonDict.
    >>> r = _parse_json(r'{"name":"Michael","score":95}')
    >>> r.name
    u'Michael'
    >>> r['score']
    95
    '''
    # object_hook receives each decoded json object as a plain dict, which
    # the JsonDict constructor accepts directly. The old
    # `lambda pairs: JsonDict(pairs.iteritems())` did the same work with a
    # redundant items round-trip that was also Python-2 only.
    return json.loads(s, object_hook=JsonDict)
def _encode_params(**kw):
    '''
    Url-encode keyword arguments into a query string.

    >>> _encode_params(a=1, b='R&D')
    'a=1&b=R%26D'
    >>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
    'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
    '''
    pairs = []

    def _add(key, value):
        # strings are quoted directly (unicode as utf-8); other iterables
        # expand into repeated key=value pairs; anything else is stringified.
        if isinstance(value, unicode):
            pairs.append('%s=%s' % (key, urllib.quote(value.encode('utf-8'))))
        elif isinstance(value, str):
            pairs.append('%s=%s' % (key, urllib.quote(value)))
        elif isinstance(value, collections.Iterable):
            for item in value:
                _add(key, item)
        else:
            pairs.append('%s=%s' % (key, urllib.quote(str(value))))

    for key, value in kw.iteritems():
        _add(key, value)
    return '&'.join(pairs)
def _encode_multipart(**kw):
    ' build a multipart/form-data body with randomly generated boundary '
    # NOTE(review): the boundary is derived from the current time in ms --
    # not cryptographically random, and not checked against the payload
    # for accidental collisions.
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    for k, v in kw.iteritems():
        data.append('--%s' % boundary)
        if hasattr(v, 'read'):
            # file-like object:
            filename = getattr(v, 'name', '')
            content = v.read()
            # the real filename is deliberately hidden from the server;
            # it is only used locally to guess the content type.
            data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
            data.append('Content-Length: %d' % len(content))
            data.append('Content-Type: %s\r\n' % _guess_content_type(filename))
            data.append(content)
        else:
            # plain form field; unicode values are sent as utf-8.
            data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
            data.append(v.encode('utf-8') if isinstance(v, unicode) else v)
    data.append('--%s--\r\n' % boundary)
    # returns (body, boundary) so the caller can set the Content-Type header.
    return '\r\n'.join(data), boundary
def _guess_content_type(url):
'''
Guess content type by url.
>>> _guess_content_type('http://test/A.HTML')
'text/html'
>>> _guess_content_type('http://test/a.jpg')
'image/jpeg'
>>> _guess_content_type('/path.txt/aaa')
'application/octet-stream'
'''
OCTET_STREAM = 'application/octet-stream'
n = url.rfind('.')
if n == -1:
return OCTET_STREAM
return mimetypes.types_map.get(url[n:].lower(), OCTET_STREAM)
_HTTP_GET = 'GET'
_HTTP_POST = 'POST'
_HTTP_UPLOAD = 'UPLOAD'
def _read_http_body(http_obj):
using_gzip = http_obj.headers.get('Content-Encoding', '') == 'gzip'
body = http_obj.read()
if using_gzip:
gzipper = gzip.GzipFile(fileobj=StringIO(body))
fcontent = gzipper.read()
gzipper.close()
return fcontent
return body
class SNSMixin(object):
    '''
    Base mixin holding OAuth2 app credentials.

    Concrete platform mixins must override _prepare_api to map an abstract
    (method, path, token) call onto their concrete url/header/param layout.
    '''

    def __init__(self, app_key, app_secret, redirect_uri):
        self._client_id = app_key
        self._client_secret = app_secret
        self._redirect_uri = redirect_uri

    def _prepare_api(self, method, path, access_token, **kw):
        raise StandardError('Subclass must implement \'_prepare_api\' method.')

    def on_http_error(self, e):
        # try to decode an API error payload from the error body; if the
        # body is not parseable json, fall through and re-raise the original.
        r = None
        try:
            r = _parse_json(_read_http_body(e))
        except:
            pass
        if hasattr(r, 'error_code'):
            raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
        raise e
class SinaWeiboMixin(SNSMixin):
    '''
    Sina Weibo flavor of the OAuth2 flow: authorize url, api url layout,
    access-token exchange and signed-request parsing for in-site apps.
    '''

    def get_authorize_url(self, redirect_uri, **kw):
        '''
        return the authorization url that the user should be redirected to.

        Raises APIError when no redirect_uri was supplied here or at
        construction time.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://api.weibo.com/oauth2/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                               response_type=response_type,
                               redirect_uri=redirect, **kw)

    def _prepare_api(self, method, path, access_token, **kw):
        '''
        Build (method, url, headers, params) for a weibo API call.
        '''
        headers = None
        if access_token:
            headers = {'Authorization': 'OAuth2 %s' % access_token}
        if '/remind/' in path:
            # sina remind api url is different:
            return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
        if method == 'POST' and 'pic' in kw:
            # if 'pic' in parameter, set to UPLOAD mode:
            return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
        return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw

    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, code=code, grant_type='authorization_code')
        r = _parse_json(resp_text)
        current = int(time.time())
        expires = r.expires_in + current
        # weibo may also send 'remind_in'; honor the earlier of the two deadlines:
        remind_in = r.get('remind_in', None)
        if remind_in:
            rtime = int(remind_in) + current
            if rtime < expires:
                expires = rtime
        return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))

    def parse_signed_request(self, signed_request):
        '''
        parse signed request when using in-site app.
        Returns:
            dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
            or None if parse failed.
        '''
        def _b64_normalize(s):
            # restore url-safe base64 to the standard alphabet and re-pad to
            # a multiple of 4. BUGFIX: the old `'=' * (4 - len(s) % 4)`
            # appended four '=' when the length was already aligned, which
            # b64decode can reject; the modulo keeps it at zero in that case.
            appendix = '=' * ((4 - len(s) % 4) % 4)
            return s.replace('-', '+').replace('_', '/') + appendix
        sr = str(signed_request)
        logging.info('parse signed request: %s' % sr)
        enc_sig, enc_payload = sr.split('.', 1)
        sig = base64.b64decode(_b64_normalize(enc_sig))
        data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
        if data['algorithm'] != u'HMAC-SHA256':
            return None
        # BUGFIX: the secret is stored as _client_secret (SNSMixin.__init__);
        # the old self.client_secret raised AttributeError on every call.
        expected_sig = hmac.new(self._client_secret, enc_payload, hashlib.sha256).digest()
        if expected_sig != sig:
            return None
        data.user_id = data.uid = data.get('user_id', None)
        data.access_token = data.get('oauth_token', None)
        expires = data.get('expires', None)
        if expires:
            data.expires = data.expires_in = time.time() + expires
        return data
class QQMixin(SNSMixin):
    # QQ (graph.qq.com) OAuth2 flavor. Unlike weibo, its token responses are
    # urlencoded query strings rather than json, and every API call carries
    # access_token and oauth_consumer_key as plain query parameters.
    def get_authorize_url(self, redirect_uri='', **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        redirect = redirect_uri if redirect_uri else self._redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        response_type = kw.pop('response_type', 'code')
        return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
                _encode_params(client_id=self._client_id,
                        response_type=response_type,
                        redirect_uri=redirect, **kw)
    def _prepare_api(self, method, path, access_token, **kw):
        # QQ passes credentials as query parameters; no Authorization header.
        kw['access_token'] = access_token
        kw['oauth_consumer_key'] = self._client_id
        return method, 'https://graph.qq.com/%s' % path, None, kw
    def request_access_token(self, code, redirect_uri=None):
        '''
        Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, code=code, grant_type='authorization_code')
        return self._parse_access_token(resp_text)
    def refresh_access_token(self, refresh_token, redirect_uri=None):
        '''
        Refresh access token.
        '''
        redirect = redirect_uri or self._redirect_uri
        resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
                refresh_token=refresh_token,
                client_id=self._client_id, client_secret=self._client_secret,
                redirect_uri=redirect, grant_type='refresh_token')
        return self._parse_access_token(resp_text)
    # FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
    def _parse_access_token(self, resp_text):
        ' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
        r = self._qs2dict(resp_text)
        access_token = r.pop('access_token')
        # absolute expiry time; QQ sends a relative expires_in in seconds.
        expires = time.time() + float(r.pop('expires_in'))
        return JsonDict(access_token=access_token, expires=expires, **r)
    def _qs2dict(self, text):
        # parse_qs yields lists of values; keep only the first value per key.
        qs = urlparse.parse_qs(text)
        return dict(((k, v[0]) for k, v in qs.iteritems()))
    def get_openid(self, access_token):
        # NOTE(review): this parses the /moc2/me response as a plain query
        # string and expects an 'openid' key -- verify against QQ's docs.
        resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
        r = self._qs2dict(resp_text)
        return r['openid']
class APIClient(object):
'''
API client using synchronized invocation.
'''
def __init__(self, mixin, app_key, app_secret, redirect_uri='', access_token='', expires=0.0):
self._mixin = mixin(app_key, app_secret, redirect_uri)
self._access_token = str(access_token)
self._expires = expires
def set_access_token(self, access_token, expires):
self._access_token = str(access_token)
self._expires = float(expires)
def get_authorize_url(self, redirect_uri='', **kw):
'''
return the authorization url that the user should be redirected to.
'''
return self._mixin.get_authorize_url(redirect_uri or self._mixin._redirect_uri, **kw)
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict:
{
"access_token": "your-access-token",
"expires": 12345678, # represented using standard unix-epoch-time
"uid": 1234 # other fields
}
'''
r = self._mixin.request_access_token(code, redirect_uri)
self._access_token = r.access_token
return r
def refresh_token(self, refresh_token):
req_str = '%s%s' % (self.auth_url, 'access_token')
r = _http('POST', req_str,
client_id=self.client_id,
client_secret=self.client_secret,
refresh_token=refresh_token,
grant_type='refresh_token')
return self._parse_access_token(r)
def is_expires(self):
return not self.access_token or time.time() > self.expires
def call_api(self, http_method, http_path, **kw):
method, the_url, headers, params = self._mixin._prepare_api(http_method, http_path, self._access_token, **kw)
logging.debug('Call API: %s: %s' % (method, the_url))
try:
resp = _http(method, the_url, headers, **params)
except urllib2.HTTPError, e:
return self._mixin.on_http_error(e)
r = _parse_json(resp)
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
return r
def __getattr__(self, attr):
if hasattr(self._mixin, attr):
return getattr(self._mixin, attr)
return _Callable(self, attr)
class _Executable(object):
def __init__(self, client, method, path):
self._client = client
self._method = method
self._path = path
def __call__(self, **kw):
return self._client.call_api(self._method, self._path, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, client, name):
self._client = client
self._name = name
def __getattr__(self, attr):
if attr == 'get':
return _Executable(self._client, 'GET', self._name)
if attr == 'post':
return _Executable(self._client, 'POST', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._client, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
if __name__ == '__main__':
#import doctest
#doctest.testmod()
APP_KEY = '???'
APP_SECRET = '???'
access_token = '???'
expires = 1393739173.5
#c = APIClient(QQMixin, APP_KEY, APP_SECRET, 'http://www.liaoxuefeng.com/auth/callback', access_token, expires)
#print c.get_openid(access_token)
#r = c.user.get_user_info.get(openid=openid)
# test get:
#r = c.statuses.home_timeline.get(count=10)
#print r
# test post:
#r = c.statuses.update.post(status=u'测试http post')
#print r
# test upload:
#r = c.statuses.upload.post(status=u'测试upload pic', pic=StringIO(base64.b64decode('iVBORw0KGgoAAAANSUhEUgAAAFAAAABQCAIAAAABc2X6AAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAALPBJREFUeNqEfAmYVOWV9t1qr67qpXql6W6gAdmVZgfFFXEnGjXBKGPUmDijj8vEuCVqYsaMPnGNjtskOo8LiCbRRBExSDKKiiIq0DR023s3vVVvtd/1f79zbhVO/sz/19MPdN+697vfd75z3vOe5V45u2KFZNuSosi5nOT1SrouGYbk8Uiq6gQC+BPH8YtsmjhHfGtZ+NbxenHE8XjEVaoqmaas6/hT0jQHf+JMHMGw9JWED67CgPSVjN8VxZFl2TAwDr4Sg8uyOM228R++EjPBNBxHnOA4+EaMJv6TxUHcF5fgh77CTfGPQ5eLC2lkcRxn4iuMiani1vjdMBRxP8tSMN1QyL3A55PpbDmdlnGNzyeuxBrwwbe4El9hRFqVGC6bxWyEdGi1Mi1exrcYDQLC+RAEBsE8cByrxSzpRrhESaXE1DEViMPnc/x+sSj8sBwxGk1dpsWI2ePygoBwC/zgND5HUZRcTmYx0TrdO7K88C8JXcNBiYWHDwQP4eECHMS42IFQSOb9z2SwPLF4zJ7W6c4Di8QRWRZ7jtFxHOPiIL7FUJmMWAaLD5MjGbMeyVghJoGDuJy2DpeLcfAnzsEEcBf8jgt5rpAsjrPK4E+6o3uQhCvmBnlB3Hwj2nbWCPGD8UkjSAPxwzqD0bEw0mcxBEbExYYhp1JilrxRuFjThALTOa5S4BxMggbBjYUV4H6YE/7lGeOHF4ARsDB88C9Mg/cnPwc2LneikAj+py0VF/Iy+O7Yf1yO33EVzuS74Ka4tSJ0VgxApuSQyFwtowkr7nZDGbJZh7/je2MZJEJsslNUBGUT68cmk9U5pA5sjQUR4Bcb36bTWIaSSGBMVg2HrMAdGUdwR5qB2BysnzUfIuPVYvN5nZgSjmMxLGX+kOkKrQkGZVYEljt9xPkYHydDNxkL+CtIljECNixugyuxBllWsL34Ipt1zQBanUzCkoW2YDjMyTSV0VEX3khPXGvBPNjkfD6vBoUWuOJuC0+lYLrYB2wdxIezYMY4wurHukpw5d6OJirlcVHNZj0AC0iWrFSsgUGHAYUVmJbEB4+pNE0PC5FIHApLnaFInMTQB9vA4iGFcBjXeHRdy2TMbFYjDHN4W2DwZLcMsFiAYpp2IvF5KtWRyWDlEi0Sx2W2TMJqcT62iFRDJvx04YcBgj9QK8IwiffKcVRFycryYV1PY2QWDYsPy8NN2ZuwkuNesGRWfl4UCx3yxWmyrLnGAIHhB9JimyFFFYaqKFhkh2XtmDkzUF8/59Ch43t7GUvcW7KzwcohfkX5fVmZ8rOf1Xg84Ycfjg0MWDgOqWHSsC5gPuMCBABpYt9wI+wn9hDX0hqE4mJAlg5hIW4BA5vIZl9avLjirLOOdnU1bd0akGWbUZC8hkyKiam6fo5MVxwk2xS/E2KzMDXXBlj2vC3kaYTWAaINw0yndy1ZcuZjj02NRgePHk1fcUUoHreKizGizB6P9FlxnOFcLvid72y44AKMm2hpsZ55BgriEDyydblGhbuw5fO/hM9iWxiZyXmw8Qv/GQppivJRKLTm7rtPaGgAHKVaW6UPPpCCQd46eXLS1QUWPfsXfOX3y2TJvIWuMgrQwqlEJ8QX7G+YcuB3Qp20ac46+eRp0ShsYkp1tX/2bCmTASaJZWAbgkGvZXlzOSiuR1Vrq6tZkEX4Cj+WBTX1ZjIQu8K6Q5h3jBtgZrBqGA5tgrgvpgjgIN4iZplO29lscMaMOQ0NYvttO6Dr0GFhzMAaIkXCG2HNjNW0Kok8uUOI694lb3euwzzmrFn12bTIlsKyXN3cDE8idCWbVQ8fFvPDh
HB+LvcXRRmvra2amCjN5ToMI8TLkKTOcDiFBet6Ly4MBAABc2QZN7fZ1UP2BAHsHoWd094qRIkESvGHlBynlU9MZHM5P1YFBO3vZ67i+kUiSA6ZpOAUeZRiSxHnY3BmVhgZFirxSTgXaoxfgkHiHTJwX8XsgczB4JS//tV4/nnvwoXSSy+pw8M2Rvf5sHvbwuHEnXcuXbYsPT7e09rq9fuXLF/OUy3btKlzxoyxoaGy6dMD4XA6lxvfsiW8bRu7pWMcEHMiIxRGjlky6kBdyaTZssCfAt3d40ePFjc0OOSB2dbYoTL0sOG4rpugR6yFEZ4FhyPkO4UTFz4N/zJIwPt5PKosq6lUXNctyAZQ7ziT994LRbJNMwhAsm0tl4uYpm/dunVnnSWQoaxs3owZ0jc+RT7fyaed9s0j1uuvm6xyRBgdJoYs6Dx7FfNjZ8FcQFHgjzAfr65DdhIWjIlZllrg5/jHNC0akMksVmFHoxhHwbDM+XEv5t7kRDXmDxCk62CwQmhpLvdGWZm9enWktNQ2DBuKXVuL45rXa6VSmcHBXCoV6+yc295uHD6s1tVJ5E6djg65pkaKRIRMW1udo0fFPY4ckVIp58sv7S++kBmZMAMYHvtP8nNeQkqm8ZbjWMTDvB6PbpoDAELLata0GDktJxr9dMUKfdcuaN9oOFwiyzMmJsp9PtDUVCYTgGu0LIOdHFE9l7GQAQs9gvHnFi2CbGzGNFyQyWi2vXnGjBm//vWy6dOl//2TlKTczTdH33/frq0VwK5p6tdft6xfP+3BB+E2jI0bnb17MT/hjaC3uH006hTILXNSUjZvMNg8Nvbl1Knlth0dHa00jCpsrNe717L2zJ5dvHIltlcpKzv/e9/z0rZPGEZXZ+fk2FhJRQW29+iePXNffvnLVKp/xYq6gYHj9u2rg2IGAg5zCiKeQncYzDweTSKyjmkx/YC8rXh8Vn19E68Wm3P4sJRMCkoEzccWjY9L06bJVVXheDzc34+vpI4ON3iQ5a6BgalMeKJRH9QP1hoIpMDPsG+67ocJ+XymQAgK5jwenLN7cnLvpk3rr7wSujrY07O/s7P/nXfmffBByw9+cOlNN5XnuVrhE/V4Fs6cWfhzYWPj8KFDdmnp1TfdNGGaB957L/7444t6ehxGZgpXZAoE2EVpTJiY2bsuKxKZv3evfsstSjKpdHVh/XBCKuSkaRYUAUbu9do1NfrQ0N/S6cSsWeXJZGxiwjSMIUUZmT8/RPrTfe65n8XjxYnECFRx7lyoIzStaHLyhD17ai3LJgOGWg7r+v4LL/zhrbcS1ZCmV1VJS5emgHznnnt+W1txV5cE9eEAq0AeoZnYA0wVKgndOXiweNu206ZMsZuaSk466cT16/sWLRq56qrynh4zEhG0HMZM8YmwXAHdK1ZwqCHGgmnRL1ByJ5GAxx+Gp/Z6cWhEVaFaUbhlTYNfnZrLvTRnTsntt8+fOzeXTPa3teV0PVBcfPzChVFyfdBXuKUEPKrjVCP2IG6clqSBP/95yl13AQ6wA4DXLxXF+s//bAL+4zM6au/Z47S1OXv22IcPA3Vs6NSUKfCFRizm3H9/CE5+bMy4+Wb700/lfHwvYlIgvKZh8MyTT1YuW4Zv4ps3h3/xC/gXh3bRIafNYY/GvEximgZF55AILKKkZI9tf3reeTXFxWnLKp01y6coR/r6TJ8vqCjGc8/Vrl9/yoknkgsqm11f/3eKB0Us0vWiAh+ig8FcbjpEwz4fik2gWlJaKr7r7tavvdbp7hbGhqs0DbAiDKqtTbir9vbm115bev319q5d9ocfKgjgYIkADswWjJAwP5ROd73wQvGyZdCm8Nq19qOPqkSNhEoXcEtVNTfC4gQFc24iW/bkZO+JJ37/rruC/wixUul0xR/+YM2eLYOEDA7CPcK8nYkJzFvoDyKqRCIxNPT+CSfMOvvs2J//XOH3gzDZgO7OTqAXB2SQtxkIKKSr1q5dAHmFeL+gmeSQO
X52gNi5nNrdLXbi7bflQrSYT2IwHYYmal1dExMTFdGot7TUCIfloSGHWQbUlkHbNDUHrBi35/iOgzhavK1pq+Nxz5EjDm5z8KBwITU1NuQNJSkr8731ltPVZd98M+gHpGuRDssUQioUmgCuPqyoqNi4sXHu3PHPPpMfekjhoBobywCJe+n6aElJXVGRMAF4Lw65OZwklyEIAuIKREuyXLJ3764bb1z5+ecKEJgNGD4mT+yYFIb6+8e6uipgIIDeykp1cFBYK+ZPkKGAdUEowAA3pcJMIK/h0JmqlhbpW9+CmO3x8aQqUjIBgC3moetqURF2JqeqLTBFWa60LOi5qQOI/TrOMc2saaanTVu3YAHQKLhoUQu0VJZLdb0sFLI40gIEYjH19cVMd0dGWMMFQ6CghzM4bqrEsj6urKy/5prJeLzkwAFBVGC37Ns4dHPJjZUYG3OzYlOnSvv2sZ9nhiMUB/vorjOfJXSTJqaJZezz+fZNm1ai6+Pz52NmAXCSwUGs3O/1nnjoUNH4+IuzZ1d8+9vFweDhw4eT7e0mNrakBDupYjaBQN1xxzH2qqtW9T7yiAhlBwcb//SnmSCnnF5T1Zpk0kSAoaq56mpEBWYhCwn7hBqTumE/DcsqmT9/1aJFyQ0bnH37lEgE5ByqhzuKHaJUFDQrRUrhgggznJISERdxSpDMOJ8WoPDNNQmPBxcnbHvvOeece8cdcKEgycWUB00SGuEn9+KLh371qyX/9m/L2V2ffbZBx9V/ZPBBv//0s87i34ePP9669lqAjUBO3KunZzQeD1VUpDdsSLz5ZvXkpMQGrOs5+Aioqq6XOM5exzFmzRJmu3RpHBoxMTGsaQHbrkZEAXzGymFZ4+O7Z806M0+WACLMqzm0FgSb8Fhjgi5TgtNhBktmCVxZXVdXxQhBQb8ciRT19Qnxl5WBzTdqmgfEA66CoksP+8lEQsgVR8bH5TlzJLJP8aFw3+npKXvxRZNTP8T7g5OTOqhLRUXZokX777577xNPZCAIjwdcORmLeY87LhSJmD09ucrKiyjMDk6ffvCnP21ubq6YPx+M8OBXX012dOhVVaGiolQ2O2/DhtqyMveOmDNkxxlyTsJxJifX1MSIJ3PymakfzAYENRxWsR5gL6yfw06E/sBbICfYiM8nyGF5uQKggto4DhQVTJvxQx0fH1u40PP008VglJ9/bv7kJyI0ZTyHEDEJSvebyeTgbbc1bNrkTnJiwoYcECHJMsKPoPT/+WAZKccJyvLf07HOTuPCC7EcOxxWKMHs8hbE7ZwowO0ByzAGBUoF+4a/cpzheHxnJlPp9cK6gDSYHPSncmysFlSxpMSmkAXeImGacRJQSNfLPR6LoTUYNL76avjAgeLVq+19++BInfJyptwS5dwcgi6vpkWef37ilFOiiEAkqRYeS/CGOJslollBXcE9/pcPjDKcz93kKX4Szty6/36O/sFeJIqohYjJE1ECgFMEFHw7lL+FdU0mk1uamlbfdVcsFOru6PAFAl7gh2kebm9v27WraffuCKEdrPqVlSsrTjkFCmanUnXbty8EtgOKVbU0lRr+7DNn9WoTZyKCpYSDm6ZHlELhG1h+5OhR/YYbzMsugxN29u+3cQnoOi6Bvxwbwx2lxkb59ts9jY3urn7yifn444KBlZbifBiasmIFOKZz4AACNbu5WSgzbIrs9FgVie1IZBdOOMH1e9BY3mqv12uaB2U59dRTyxcv/r/lOuk4I7/4Rd2WLdjVRCbTf++98y65hL/qGRoaBt/88kvYoZ1IvH/OOWf++79nP/tMuvZaQLdEiV64Ys68yJRSFd6IQn8Old2MfyEBBG3KZodmzvQ9/3wZItbWVuOKK4AUCrErcW0eDmRKFUqUSHCzGlxgoDQzB4IineBSFo7+OcMCNTCMkKYVU/bQaW42vvMd/VvfMs87L3PTTdnJyYgsT7nkEiMUghUUBQIznnwy9+677AqnVlTUP/TQeH29kkpZs
AXskiR5Z8zADkssZiwMi+TYm1KzNrloYdjQAk6AcrTAjgRWVlSU7uvrRUxGAZaA93AY/gaeT8RAsZigorAFXI5dZQrJiVcSAaevZfbDEB/vtUtuOWgEj4Vnsu0kVAuY3dpqf/651NZm9/RYf/xjy+9/L3A+FlNIZwBgCKqkW2/Vb7vN/uQTmFBZNBqZO9cCGzdNdXISJEwFf8zDNQSKgFGQJNgnthDRMkCFyijMw9wEPWUmxL8IhnV9EhtISqFStUBsCYUEEhUxZfa0nF0gSuNwNh8qQF6Dq0i8QAqOyUdzOa+QlFZxJ9Ir7IAPQ+N+ppkKhztHRo6n0pfMaYpsVsNuYBKvvWZu22ZMmwbuocEUsUjK3QMeMJQMLOScA1dYqPjqOkLeSSqUCv4A1OFAlRGOYg+9uDgGzMtXT8RSsU8Yjes+zMnyjocrrG5QBIlAFpwJpmys5u5qnsRyhh07HMCNiabZ06Z9HA7rhmEZxoEZM05kCjE6KjE1VZRWy3p/8WKA7KpDh6IdHUZHh0jxOU42nR4MBlXKIkh0V4fqYO4iOf/GWXgchEC5hkxhENefXeaIP4NBT77IaAOloB1YJ3QYCsJlDXalXL6lfLObtMqvnxNggnhw2CBuwCIh08JAHlVNjIwI7Z0xw/u73/FufLuurhrWArb8H//BqUJcdWDRorOfew5xf+dvflP1zDOCq6gqaMDWaHT2RRdhgxxELfDVVEPmormb4uEMZj4NLnOVjFRA5iIwxI0jmcyY1zubUtZusoaTcPlcMrNjLvS6dUnO+/K3hXNIrTQuz8L9OsxLqNwKidqQIn0Fs1k8b94xX79nj7V1q7Vzp0zfArcWKUqFz+eXpMCNN4Lif/7WW4GOjq/mzVt4ww0rCeTt554T1l5U5JZUITuGEC585/OPvL0y7xXmQFYHu8hms8bs2cWkgyIa5awN+1UWIimOW5SlcTgbyRkrsZHsDjFhkeLhJgLWMciPS6SKglChGDtD8zOfesppbZVBuQA5CFayWYUywOJPx6nft0+/4w7rttuCkcipl102dvHF7W1tG2tryyl9aW/bZr3/voAArqRzYZmXyrUCTqBCV0VKMScVnA1H6Y6jg3XV1jKXElIG/gEyKDx0CK5YNVwTABJh/9i8TZE+c4N8qkXLvGC3/wHaz2UXKtIinp62fbve2Kh9+aW1ZYtLG3AmdAmRLVclScfg+jxvvKE3N6v/9E+eNWtKYrGmuXPF7NJpe/t28+GHJcTiHPGza4XOM3Un8HTrspg0a2NRkc37TIYnLN2ywqwCJCaBf4yXnIvmFgyOscgKXDQtZDU49Y1vISCBYUuWHGsWwd9UnhZngCHiCJMVcpXcViCzBnIhCmpJhSIxOY6ra2rkVauU0lIJDgm60NEhc0GETQgLwDZy1ZsU262SE8Zw94lbNyzUk+H2stmB+fNjzz7rR1j/wgvWAw84kYjQBWIybL2FLLcb+gLtqSrKGQXhHbAE5jkieKA5udUAyqE6XBkulAKIeIrcNWdeuHJDkbrLZnA+x9m4DZX8hRuA0RJndiN19hlYMDmzYwVrBhUemf/lrxhyGXUsy25sVKqrRTafIz4GV272ALCBn1DngRAWnwBBUIcNC4V7FACBGjtesdp8Rccme+CCrTgCeMQacAHrTN7MZG5/4FpOvtjPGyjT1slcx4DLYZ+XL9ZxZdQdX3HbiDj5xBlGt9pC0rTZu0D1WludlhaZoE5iG+TMDrWUuDyEO1LYvTOSse4w6yL37na7CDfIlXEuZ0FAWAYLEpeRknDmhTsoRJ2S7800gCkrlefc6fK8WcysXVTFc4WIO2IlRJjcdDFmxj6TTFpLpYCLOiu85hbAuP2DK72cpkIYipjF5tAqrxSCZlEOx3XR+Xoi02fFtexvJPFEeZZVNJ/owso9qZRX132plMqoxqXzAlfhlRPpd3t/uN7LPWgse2a5+ayowq0j3H0EzMd68t1gqmke9Xq/xDncyEBe1+1ZY
5SiCr4my722PYH94JmzLTBHpsqjW+7G/nM/BfXukB+nsqhbrWYZc48QMXVRcVOUfkl6LRJ5OxaLY1ZseHmJSoUuAyrYs1IoVAd1G3BgJsGg8O34l4GaUhCuVpNCis3HnpOkMN2d8+YN/epXI2VlWt5rFHilRIVOKNVANvv6eef1XXopN1OohoGDGncAshkzx8aFVC5nRBRZL7e14xvdVEKlUylWMDWdTqbTfzj99BU33lhaXDzw2mvRJ59UuFqVTmvgzLpupdPQLom0DrLD1osSZiLhKTiYTAbOxmJVJPt0KLIRfQ2YPUJp7Jht+0jRhnK50PLlZ5x66tDgYOLWW/2RiM/rtRXFJA7vYRhLp3dVVW245ZY6TRt5993Q2FhKlicwH0mqDIV8imJREI5gxl0/Y5toamFl4JwLFdq4R8LhRAwk5zhHA4ElV1yxhEJw+/LLs1u3ygik8JXP15dM7mho8Mdi5QMDGN1AjFVWli0rQ3gERfCPjoaGh+1czvR6546PT3Mck2kQkIY3wetNZ7NvVFbmFi8uNgxfe7uezfZXVp528smi8nP22Tt6e3O9vYGBgfldXQ0IARznnVAoUVqa9PkqL7xwGlUkO2+5peWDD6RYLFJfDz1vPnhw+fbtJbBteErYAgImbqYAmopGJAJPh9K8Tj695lo1oY6laZW6rlKoyNlAlQMuny9jmn9ateqUn/8cYfBIPD4xNqZ5PEVR8fGRNWYMY3R0NJvJePz+iYMHh++6K5bNWpQAY7P0ZDLvVlVNfeSRE+fNg17Ek8mcaa4rLuZgEkNd+OMfI2RDEDO6bRuC0AFI/L77Tj3zzLAkhfNZnuXnn4+fAvmFIiSgxlu2CE5GYMQNkQ55R60QRjn5Crrb8EQQLQwvkykyTf13vzOWLBHs+fHHsXgRqchyIpM5/qKL5lRW4nAYgTh+vgljkuT3eEroW/Gpqhpoa7MefdRB+MECxewdpyEWmz5vnkop3mo4f2wIlKWqyh0JBO7jjysCgYp9+wyPp1qWa955x9/d7SxYIK1Y4Y4cj4taNCa0YAHCZq27u+jTTy0MRXrkdvcyvxAx/KJFMsVfBe/HqRYPba9BjXm6omTS6Wx1tWWaSjwehGGbZiAcVi0rs3hx8OKLlSlTHMxyaEgwhJ4e+9Ah4agbG+Vo1F682HPqqe7swUDvvFOmBrECbgvZL13qWbLEicdFum9gAORx5Pvfr/jRj+AR9Isuco4cEedR76hM+Aoyb2ra8BNP1J92mrN/v3nddfbYmNjGykqR8Qb35Ow8JysLvWnkqylrSTGXQ105TB6wno9M81B9va+42FZVMxTyVldrqqpFIqqmGfF4Lp0uHhlZ9be/FX/yiQH2A/FhhYh7uYOYegodqnQYAKGHH55y5pkiSQq6mu+LPqYL2If//m9p5064VDUfMPhefjm3fn0QLhZyhGZy9MM2STkZr2X5/+u/nKYm+fnntc5Os7RUXDs0BOAwoJUcPAKlKGBwb0fLFIkVmWpTrh9WFETCO/3+7muuOfPcc0so4vG6FYr/WUCUpOS998pbtvh4lvBDHMRzBYPQWA8E/NlsfOvW2Lp1PiDk1KkWtpdidO6jZb4Bt9fn9/+tutobDlcPDlaMjPgHB4euvDLr85nAfE2L+3wqgFrXU8GgAWsKBv3pdOXhw0Pnn99jGIPLl2NjVECjJNUNDZ2QTHrZ6XBXMhw++B9TMVF5yDeqiX3mIBEb1dS0ceNGX77iLo2MSORXBaqNjsrl5VCeUDIZGh8fkOU9mmZHo9ifDMQJbVfVMLWkVqjqcbatwEkcOTI8OFhbVaUMDQnkZErDvXPw8YaRzOXePP30k269tSoWGxkc7D58OIG9UtVASYkXhFyWcTwYDALtgqFQOBzWvF5A/HBvb39zc7SycvmCBUHqgMGwI2Nj8bvvrn3/fYvdLTfQcEc8tXPnVbzQsW/bps+3rLVV2rwZblP+4gvRjzMyolJ7s00eFWuwqquBdkNdX
VtOP33ZD35QU1WF2fj8ACkPdiwxOZnOZDKjo32PPjodNqaqKUBRVZW9e7fDSMFPFpDK4QeuaM26dQumivaQ8oYGbrr7f30wme7uymBw/rp1boMTfBgx3Gg4bIGS8wMLhYwH54AozNbYJDh16Pawwn+Cdfzyl5ACHE8SpquqI4D7bDZKA2XT6ZqWlqDPNxAMnn7bbfNrav5uPlVhchkzZmTXrbM/+cSvaRblAx1ACxf1/H4GDpEGDwSiPl/42Wetr7+GmgyEwwlFmd7fL8diImiDuvX3Cw/ywx96jjtOmP+OHdavf409ELAUCilAGY+nq6iooaFB4PCnnzpffy2VlbmRFi2KA08R8EUiGqe8Bd/iBwxI0UX/VzA4nkq9snJleVOTMT7unzYtUlraMTSUhIWUl0/s2rVo587GcNh+910Ebg5C3+ZmofYAFYizslKZPx/eQt28GfQL3CgxMSHuMjAg3B53k3MVmut4wAgAz29/K01M/HXNmrm33TZx3XWlHR0GxieYBWHsPXAg8OqrFaWl1tatTnu7VFIiUa+lNjDwvs/X+cMfxkABOztFKzHWwlEthRAiDUo1BhE5gClwFlcgGAyP66aUf4CiHC4tXXnPPU1U9fl7nVq+3NizB+TQ8/DDomPZstRCao7gEUegyQiqATbYKOCv6HdqbeUuAbe+g/vlch5FyUHZ4LRVdQJotGTJggUL+q64ovf++6FEIUXxEQTmRkfTQ0NYsCjuwWtQZdhQlMF0+utTTvn+1VeD/0488khWUfy6HvX7Teo7dCgUc7sPqRCrcSO9AC0uDkBslNYyvN7ZmYxoJYWc2tqcvj65oUFs0eio0tio7tqFKULVD3s8H9TXT0Fg3dtraBqYUy4cjhpGdTo9FaNjbUAax0FMI+rxY2PwMQ63fZPDgH/cqWmHV60qCwa9HR0DuVzTmjWYXuTyy/8WDCZ6epS+vsojR5YfPRpQVWicwNexMUDd24oyPG8eBJorLz9x0yaMkz733B2joxr8Uy435ZNPlnR2yoUUD5wu17REVnj5cs4bHHvGgqAcugHuobC7yoiP5fEEdB1DY+tUSqb16PofN2y44MYbAZK9vb3BaFTPZHyBQDaRGGptVd56a+mOHcGiIqCINH26Z2ICDpz7pdwEoq4POM6OG264+Oqr4fnGaXKx/1kNBBofTadDV17p2bfv6GOPHQfHdtllQ4bx8T33nHTxxUFg1T8CteFEwv/d7/r7+jjL7QaCFLpqLpRBjQs5awISLAoM9q2ysiKvN2Pb2WnT/EVFvqGhbCplRKOg8k3t7QOx2NnXXtsAc5KkCiAKP+GFT3W1NGvW5Nlnj91xR/CNNzzh8JednWWmWYOAibMwlE6Cgw1ns2doGteBY4OD0vAwLpQoteDs3Alo8FRU1Pf3m0NDIDMDL76IvUAEA5Jx1vbt8PBOWZkze7bMfa0gmAcPSjNn4szyDz80AHWU+udeFrG3lIoSjWlu4opCCplSJLZPtHL+Phar+c1vZlRXIwaKYcOpsyxHQp3o7i695JJ2zPXVV+tqaoBY5n33OfBYMK1Ewli/3vMv/wIal92/X73ssmHTfONHPzrPcaqeflpUjwtkG+jFrnLaNAGWnZ0AlZ5zzql8+OFAV5e+YYOoD5O7hiNMZbN/uuqqExcvrvzJT1Qx76ydzaqy3FdTo7z8cm1NjXHDDfb27SKLKBoyJ9zkFMfSnC2kjAUlNIiICGVjHCcGYqVSM0OhpoaGcp+vRtdB8WAJoXi8tK0tkExWDQyINuV0OkVFI7u9He7H6e+3Ozqk3t7UM88c/vBDQdFiMU8w2FFcfOrFF9dccEHW41EKCSAmt7gWU2luhshEo0QkEtq9O9PWJh05Iko5sZgSiXgRD1jWAUVpWLq0fu3aRHk55GLRozFqIBDu69OhGgjI9u7VwFJGR4Un42ZxzmPmiQYroMaNUBLXOOhhC0HBUin4g+NaWrA/UDALy4DzKC3FCm2gPEGlBoDVN
JNoqgVKRM95cCvZ19ksiMc8SiMjLC2NRIogzfp6u6lJ+vhjbmAudH4KPgC27PF4s9nJyckQFOPqqxEk+oAxhpGy7f2O015UlFy/fiNllEf+9V/f/e1vrWhUAyimUqHu7vArrySKigActZpWDeLElINbkzh5yFQcaxTEg5+N4JwWVbfcDDgIuuN0ffHFUVWtxz5kMs7gIHQhijAV4iBKHLCsMS64TZ++o7FxQtex7Qm/P7Ny5aXUlShaCTOZdCDgjI/XlJf76+qc3bvl/HNbDvUCup0Htv16JDJyxhnlmpbq64MOVx89OqOz880TTph66aVLFi2aCd9Opj5n7dqGtWuZXuWE8k7E29sRcpvr13/V1ja8efP83l5+cOBYYanwkKfo8cg3UotYh5vqqSnMa5p7ZPmjjRuPP+20vS0tMCGV2pjN3t7wwYNLmptjgDXD4IKbv7r6uBdekCndB/OvpMIXGLj10EOwfDD+qooKzo1y6wxnPwVeUDZT3Au3vuOOa844AxaYoQdERgxj5LrrztywYRb4Y6HjjnonA52dUnExNtAbjRZFo7XUxCA+S5cOrVmTvfxy38SEzFnXfGAoeDG0XOwwVWtlLrhwWyKhmW5ZbWvXXnvnnQIxC6E2fYay2e4XXww/+aQYgWEZYQr3iLqRVMp+7TXrnXfA8gCViJ99pF0KMUq3SkAQYlMHDNQ+WlIybdkyJd+DKg0MVH/4YQUY1S9/aeBGljXZ1OT/2c9ARazbb7cATmVlQGPQT6m8XJ47V7ngAplyBhW9vToljN0HR7hXlos7FEJpblcjd4fn2/AgeDOXq6ut9XPQumOHDRACZ8ASN22qKCuLXX116r33Ih9/HOrvl+hpF+vBB+3OTgCGCMQw9IEDwruQ9wun0xnQLEBjS4voLaDOMFf2lPREWDcd0f/115vz5gGrna4uZ2JCGRzUqG9AHR3FrAaBTJs2zZk1K9vT4we3Q+AFjevuFuj39tvOSy/Zc+aIW+/d6+bMIDV60EDsH2bFHSCii4dvzPUefoiNDNiDaUEnKfNiPPssXKJMyc3W0dHG++4TzyTNm6ft3Vv3+us6YsN9+6xXXhGFFcTP3LkJhANPzmSgojlsOKKl6mozmVQoe+g+zEkuUDy0geAOtoBY6oMPxPMIEI3Xm/T7WxBLhEJZbJ3fPxoOn0PBee9VV+15/nkRNmD2huGbnIwMDFQODcW6uqCoqt9fgpBNUUyuobmPo2kuAxEqzSm1QsKFq2eUeTa5tMU5Z2pn8gClDh1K6Hqp1wuXYHu9QfD1n/5UdAHDqDBpUl2RpqXkqENo2VFVtYwyW3ZDg/L551L+EXJREKL+qLdAfZcuLR4bA1IgzprX359W1b+sWTN93bqGuXNLy8o8Xm9AUaI0OEy6Zt06jTodTWpkHo3HRzo6jhw8CJeApWqtrYs++miqYVjcoMNPhHKFSeSuAVG8VG4WomhJsFxcnMvxDsM9eqnzaSyVOlRZeQIX0CYmHH7QEfpDGQwI7iPTnAyFjh8bq4QQYZ8ez2gikV64sIY6zrT6eveZcS47pdOaLG+bPn36gw82zZqVhA+jJ7nGdu+OP/XU9x54oJx1gSuVhc5iuInubs4i+ELiUxEMSoilTzqJT8hKUtc77+g//rEGyODOTW4FF2nHhOaWiLiNmZyVUACs2ecLjI3lLMurql1r1+7p7YXixSsrT7r++gCxFgf0gBqHRCwNuQSDnbre8s//vOSMMw7v3v3pq682trerk5N/WrDg9I0b3Q7XRMLi56dJg7DDRi5Xv2DBKmoc9fHy4vHK/fsbMfiVVxogrZOTzuSkmkgMf/e7ZddcA1Oy33zTvOceUXOGBpWWKgjmYjG5pEQ+9VSF4BrrmT1/fo7L5Vy+445hGEskkg8eCi3H/AQXSIXjfFBXN2fz5pjfn4V3NQzQxoiiMChbP/+5uXWrzGUkcqqabXdZlrF58yyaPaK2zn372pubV553XkM+52pceqlz6BBXl
R3KocLsTTCNSy9VSkpEUA1UHx5GQCbTA3nu42SapmYybzc1nfrCCyEY5hNPyMBtfj4ZKknVDBH0QolWrlThUDIZ+y9/EfuRfzzcbbuj/gOB0txQe+wxU24iNk0/jJ+0F3GvHwfhcrEte/daW7bYBw+K7jB+4pFcPEIohITx7dudWbOgdZXBYOXq1ctXrz72ZNof/mC3tMj5J0GZDAG0ATz2U0/ZHEVxJx6VbETmmDuXgCa27W1oCHLDc1WVxadxIYrqQW6b9EcfWTt3ii2NRMTMuVILJserJbyk/i9+NJPJVr5UD0CqGRrKDQ1JU6ZYjz1mvfaaG1FR7sJ9vQBX9KglCd+qoVDs2WczgYD/8suVwtPP+IyPiz6Yp59mL+BQXwdXA/hpMYWSW25xnPWQatyiSkZVAVwYA5ciQiIj0M0/S89P6bLExQ87QoxAFV9RkecywzfeB5DvAOCEC9dy+clTXJRMmscf762osN97jxNAMmWtuaGOHxg59gwfh9rgEPhz9my1rg58QDhkRG0I9Pr6uNPDLbtTUdt9tIY4j0yVUYcYKxMBt0uOpivaAiMRzwMPYB/N228XqXaySbfdjBP02IxCJxqxGiGyfPufTKUWCg+hddz8xM/wcLcKV0y51YkKsFxfFVMsvDGBi0/8PDA/Ts6QiK+Af9xHwu0FhcesOJnIlxRaSQsNEUzx+Som9tz1wQ8CYT0ENFyXLTy64VJjtix6St19IQM/jFt4eQRXV0QHAL2vg9kmjyKTSNzOGq5T518acSw/BOullHXh7RFcHHOr5Nz8y+2D1MjCDY+FV2i47wMBMwUhYc3Kl/LcJ9ipE5UVSuwYp4RozTIXrrmJlkO0QuE+FCok94+9xIM7ABmqvF6NOx9l6i4v9Mm7/LPwYCQ1P4mpUDrbfV0Iv0Cm0OrI9+BIKN+Gw+9AEfeDqnOPGHdZ8vsq+EUEvG/s/1lkhVIGV/Z427EeFl/hBN4q1kTuNuFVsQKS0gk/lH/HCI5AvgqHxa5caU+4V921TJ4W3UCUyAvNcsyiaK84mX7s7SmYN+0JFxYKFbBj76UpvJSAuxLyD0WIjn2WciF7zmUh1nZq9nBZA8ZnRpiPhFz/wsE86RFPr9DI6lDnFWaruH1HuD3GpWfg+H0CrvYWGrXzXYPCVrlNstCwQFjFzS7ce+42b3AHOrdFFHqTOK3D5phvk/xm+xM35rlPYHF8zi18kAhDKaNjoWWBxUp/2twfBDlywyoDGNf36E0DtGBqBHD4LTH5J+Tcahj2il4OweIoPHMv5V/tw76EH993Ci25NILMas9NhPmUKOuCm/HHMihn6L4MgIsD4AwsbtwCk8m/u4UVjV9KxJ0eNsdbDBOEl06+yZ+vsmHPsERaKquYRE/EuVAsF/o8C8+A8PtaMJXCe1kKL77R8un7wmMM3JhFyOzQGtz+Ku7k5do6M9t8R4uL21Rt4WbJgmVKBUsu9M4zwufTyYWHzdhT8FuuZH5nEDec8ZYUmjgga36RRDb7fwQYAOkxP+Fzk1pGAAAAAElFTkSuQmCC')))
#print r
# test remind:
#r = c.remind.unread_count.get()
#print r
|
michaelliao/sinaweibopy
|
snspy.py
|
SinaWeiboMixin.get_authorize_url
|
python
|
def get_authorize_url(self, redirect_uri, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self._redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return 'https://api.weibo.com/oauth2/authorize?%s' % \
_encode_params(client_id=self._client_id,
response_type=response_type,
redirect_uri=redirect, **kw)
|
return the authorization url that the user should be redirected to.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L218-L229
|
[
"def _encode_params(**kw):\n '''\n Do url-encode parameters\n\n >>> _encode_params(a=1, b='R&D')\n 'a=1&b=R%26D'\n >>> _encode_params(a=u'\\u4e2d\\u6587', b=['A', 'B', 123])\n 'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'\n '''\n def _encode(L, k, v):\n if isinstance(v, unicode):\n L.append('%s=%s' % (k, urllib.quote(v.encode('utf-8'))))\n elif isinstance(v, str):\n L.append('%s=%s' % (k, urllib.quote(v)))\n elif isinstance(v, collections.Iterable):\n for x in v:\n _encode(L, k, x)\n else:\n L.append('%s=%s' % (k, urllib.quote(str(v))))\n args = []\n for k, v in kw.iteritems():\n _encode(args, k, v)\n return '&'.join(args)\n"
] |
class SinaWeiboMixin(SNSMixin):
def _prepare_api(self, method, path, access_token, **kw):
'''
Get api url.
'''
headers = None
if access_token:
headers = {'Authorization': 'OAuth2 %s' % access_token}
if '/remind/' in path:
# sina remind api url is different:
return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
if method == 'POST' and 'pic' in kw:
# if 'pic' in parameter, set to UPLOAD mode:
return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
r = _parse_json(resp_text)
current = int(time.time())
expires = r.expires_in + current
remind_in = r.get('remind_in', None)
if remind_in:
rtime = int(remind_in) + current
if rtime < expires:
expires = rtime
return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))
def parse_signed_request(self, signed_request):
'''
parse signed request when using in-site app.
Returns:
dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
or None if parse failed.
'''
def _b64_normalize(s):
appendix = '=' * (4 - len(s) % 4)
return s.replace('-', '+').replace('_', '/') + appendix
sr = str(signed_request)
logging.info('parse signed request: %s' % sr)
enc_sig, enc_payload = sr.split('.', 1)
sig = base64.b64decode(_b64_normalize(enc_sig))
data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
if data['algorithm'] != u'HMAC-SHA256':
return None
expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
if expected_sig == sig:
data.user_id = data.uid = data.get('user_id', None)
data.access_token = data.get('oauth_token', None)
expires = data.get('expires', None)
if expires:
data.expires = data.expires_in = time.time() + expires
return data
return None
|
michaelliao/sinaweibopy
|
snspy.py
|
SinaWeiboMixin._prepare_api
|
python
|
def _prepare_api(self, method, path, access_token, **kw):
'''
Get api url.
'''
headers = None
if access_token:
headers = {'Authorization': 'OAuth2 %s' % access_token}
if '/remind/' in path:
# sina remind api url is different:
return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
if method == 'POST' and 'pic' in kw:
# if 'pic' in parameter, set to UPLOAD mode:
return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw
|
Get api url.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L231-L244
| null |
class SinaWeiboMixin(SNSMixin):
def get_authorize_url(self, redirect_uri, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self._redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return 'https://api.weibo.com/oauth2/authorize?%s' % \
_encode_params(client_id=self._client_id,
response_type=response_type,
redirect_uri=redirect, **kw)
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
r = _parse_json(resp_text)
current = int(time.time())
expires = r.expires_in + current
remind_in = r.get('remind_in', None)
if remind_in:
rtime = int(remind_in) + current
if rtime < expires:
expires = rtime
return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))
def parse_signed_request(self, signed_request):
'''
parse signed request when using in-site app.
Returns:
dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
or None if parse failed.
'''
def _b64_normalize(s):
appendix = '=' * (4 - len(s) % 4)
return s.replace('-', '+').replace('_', '/') + appendix
sr = str(signed_request)
logging.info('parse signed request: %s' % sr)
enc_sig, enc_payload = sr.split('.', 1)
sig = base64.b64decode(_b64_normalize(enc_sig))
data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
if data['algorithm'] != u'HMAC-SHA256':
return None
expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
if expected_sig == sig:
data.user_id = data.uid = data.get('user_id', None)
data.access_token = data.get('oauth_token', None)
expires = data.get('expires', None)
if expires:
data.expires = data.expires_in = time.time() + expires
return data
return None
|
michaelliao/sinaweibopy
|
snspy.py
|
SinaWeiboMixin.request_access_token
|
python
|
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
r = _parse_json(resp_text)
current = int(time.time())
expires = r.expires_in + current
remind_in = r.get('remind_in', None)
if remind_in:
rtime = int(remind_in) + current
if rtime < expires:
expires = rtime
return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))
|
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L246-L262
|
[
"def _parse_json(s):\n '''\n Parse json string into JsonDict.\n\n >>> r = _parse_json(r'{\"name\":\"Michael\",\"score\":95}')\n >>> r.name\n u'Michael'\n >>> r['score']\n 95\n '''\n return json.loads(s, object_hook=lambda pairs: JsonDict(pairs.iteritems()))\n",
"def _http(method, url, headers=None, **kw):\n '''\n Send http request and return response text.\n '''\n params = None\n boundary = None\n if method == 'UPLOAD':\n params, boundary = _encode_multipart(**kw)\n else:\n params = _encode_params(**kw)\n http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url\n http_body = None if method == 'GET' else params\n logging.error('%s: %s' % (method, http_url))\n req = urllib2.Request(http_url, data=http_body)\n req.add_header('Accept-Encoding', 'gzip')\n if headers:\n for k, v in headers.iteritems():\n req.add_header(k, v)\n if boundary:\n req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)\n try:\n resp = urllib2.urlopen(req, timeout=5)\n return _read_http_body(resp)\n finally:\n pass\n"
] |
class SinaWeiboMixin(SNSMixin):
def get_authorize_url(self, redirect_uri, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self._redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return 'https://api.weibo.com/oauth2/authorize?%s' % \
_encode_params(client_id=self._client_id,
response_type=response_type,
redirect_uri=redirect, **kw)
def _prepare_api(self, method, path, access_token, **kw):
'''
Get api url.
'''
headers = None
if access_token:
headers = {'Authorization': 'OAuth2 %s' % access_token}
if '/remind/' in path:
# sina remind api url is different:
return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
if method == 'POST' and 'pic' in kw:
# if 'pic' in parameter, set to UPLOAD mode:
return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw
def parse_signed_request(self, signed_request):
'''
parse signed request when using in-site app.
Returns:
dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
or None if parse failed.
'''
def _b64_normalize(s):
appendix = '=' * (4 - len(s) % 4)
return s.replace('-', '+').replace('_', '/') + appendix
sr = str(signed_request)
logging.info('parse signed request: %s' % sr)
enc_sig, enc_payload = sr.split('.', 1)
sig = base64.b64decode(_b64_normalize(enc_sig))
data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
if data['algorithm'] != u'HMAC-SHA256':
return None
expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
if expected_sig == sig:
data.user_id = data.uid = data.get('user_id', None)
data.access_token = data.get('oauth_token', None)
expires = data.get('expires', None)
if expires:
data.expires = data.expires_in = time.time() + expires
return data
return None
|
michaelliao/sinaweibopy
|
snspy.py
|
SinaWeiboMixin.parse_signed_request
|
python
|
def parse_signed_request(self, signed_request):
'''
parse signed request when using in-site app.
Returns:
dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
or None if parse failed.
'''
def _b64_normalize(s):
appendix = '=' * (4 - len(s) % 4)
return s.replace('-', '+').replace('_', '/') + appendix
sr = str(signed_request)
logging.info('parse signed request: %s' % sr)
enc_sig, enc_payload = sr.split('.', 1)
sig = base64.b64decode(_b64_normalize(enc_sig))
data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
if data['algorithm'] != u'HMAC-SHA256':
return None
expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
if expected_sig == sig:
data.user_id = data.uid = data.get('user_id', None)
data.access_token = data.get('oauth_token', None)
expires = data.get('expires', None)
if expires:
data.expires = data.expires_in = time.time() + expires
return data
return None
|
parse signed request when using in-site app.
Returns:
dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
or None if parse failed.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L264-L291
|
[
"def _parse_json(s):\n '''\n Parse json string into JsonDict.\n\n >>> r = _parse_json(r'{\"name\":\"Michael\",\"score\":95}')\n >>> r.name\n u'Michael'\n >>> r['score']\n 95\n '''\n return json.loads(s, object_hook=lambda pairs: JsonDict(pairs.iteritems()))\n",
"def _b64_normalize(s):\n appendix = '=' * (4 - len(s) % 4)\n return s.replace('-', '+').replace('_', '/') + appendix\n"
] |
class SinaWeiboMixin(SNSMixin):
def get_authorize_url(self, redirect_uri, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self._redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return 'https://api.weibo.com/oauth2/authorize?%s' % \
_encode_params(client_id=self._client_id,
response_type=response_type,
redirect_uri=redirect, **kw)
def _prepare_api(self, method, path, access_token, **kw):
'''
Get api url.
'''
headers = None
if access_token:
headers = {'Authorization': 'OAuth2 %s' % access_token}
if '/remind/' in path:
# sina remind api url is different:
return method, 'https://rm.api.weibo.com/2/%s.json' % path, headers, kw
if method == 'POST' and 'pic' in kw:
# if 'pic' in parameter, set to UPLOAD mode:
return 'UPLOAD', 'https://api.weibo.com/2/%s.json' % path, headers, kw
return method, 'https://api.weibo.com/2/%s.json' % path, headers, kw
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://api.weibo.com/oauth2/access_token',
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
r = _parse_json(resp_text)
current = int(time.time())
expires = r.expires_in + current
remind_in = r.get('remind_in', None)
if remind_in:
rtime = int(remind_in) + current
if rtime < expires:
expires = rtime
return JsonDict(access_token=r.access_token, expires=expires, uid=r.get('uid', None))
|
michaelliao/sinaweibopy
|
snspy.py
|
QQMixin.request_access_token
|
python
|
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
return self._parse_access_token(resp_text)
|
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L314-L322
|
[
"def _http(method, url, headers=None, **kw):\n '''\n Send http request and return response text.\n '''\n params = None\n boundary = None\n if method == 'UPLOAD':\n params, boundary = _encode_multipart(**kw)\n else:\n params = _encode_params(**kw)\n http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url\n http_body = None if method == 'GET' else params\n logging.error('%s: %s' % (method, http_url))\n req = urllib2.Request(http_url, data=http_body)\n req.add_header('Accept-Encoding', 'gzip')\n if headers:\n for k, v in headers.iteritems():\n req.add_header(k, v)\n if boundary:\n req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)\n try:\n resp = urllib2.urlopen(req, timeout=5)\n return _read_http_body(resp)\n finally:\n pass\n",
"def _parse_access_token(self, resp_text):\n ' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '\n r = self._qs2dict(resp_text)\n access_token = r.pop('access_token')\n expires = time.time() + float(r.pop('expires_in'))\n return JsonDict(access_token=access_token, expires=expires, **r)\n"
] |
class QQMixin(SNSMixin):
def get_authorize_url(self, redirect_uri='', **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self._redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
_encode_params(client_id=self._client_id,
response_type=response_type,
redirect_uri=redirect, **kw)
def _prepare_api(self, method, path, access_token, **kw):
kw['access_token'] = access_token
kw['oauth_consumer_key'] = self._client_id
return method, 'https://graph.qq.com/%s' % path, None, kw
def refresh_access_token(self, refresh_token, redirect_uri=None):
'''
Refresh access token.
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
refresh_token=refresh_token,
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, grant_type='refresh_token')
return self._parse_access_token(resp_text)
# FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
def _parse_access_token(self, resp_text):
' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
r = self._qs2dict(resp_text)
access_token = r.pop('access_token')
expires = time.time() + float(r.pop('expires_in'))
return JsonDict(access_token=access_token, expires=expires, **r)
def _qs2dict(self, text):
qs = urlparse.parse_qs(text)
return dict(((k, v[0]) for k, v in qs.iteritems()))
def get_openid(self, access_token):
resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
r = self._qs2dict(resp_text)
return r['openid']
|
michaelliao/sinaweibopy
|
snspy.py
|
QQMixin.refresh_access_token
|
python
|
def refresh_access_token(self, refresh_token, redirect_uri=None):
'''
Refresh access token.
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
refresh_token=refresh_token,
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, grant_type='refresh_token')
return self._parse_access_token(resp_text)
|
Refresh access token.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L324-L333
|
[
"def _http(method, url, headers=None, **kw):\n '''\n Send http request and return response text.\n '''\n params = None\n boundary = None\n if method == 'UPLOAD':\n params, boundary = _encode_multipart(**kw)\n else:\n params = _encode_params(**kw)\n http_url = '%s?%s' % (url, params) if method == _HTTP_GET else url\n http_body = None if method == 'GET' else params\n logging.error('%s: %s' % (method, http_url))\n req = urllib2.Request(http_url, data=http_body)\n req.add_header('Accept-Encoding', 'gzip')\n if headers:\n for k, v in headers.iteritems():\n req.add_header(k, v)\n if boundary:\n req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)\n try:\n resp = urllib2.urlopen(req, timeout=5)\n return _read_http_body(resp)\n finally:\n pass\n",
"def _parse_access_token(self, resp_text):\n ' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '\n r = self._qs2dict(resp_text)\n access_token = r.pop('access_token')\n expires = time.time() + float(r.pop('expires_in'))\n return JsonDict(access_token=access_token, expires=expires, **r)\n"
] |
class QQMixin(SNSMixin):
def get_authorize_url(self, redirect_uri='', **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self._redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
_encode_params(client_id=self._client_id,
response_type=response_type,
redirect_uri=redirect, **kw)
def _prepare_api(self, method, path, access_token, **kw):
kw['access_token'] = access_token
kw['oauth_consumer_key'] = self._client_id
return method, 'https://graph.qq.com/%s' % path, None, kw
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
return self._parse_access_token(resp_text)
# FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
def _parse_access_token(self, resp_text):
' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
r = self._qs2dict(resp_text)
access_token = r.pop('access_token')
expires = time.time() + float(r.pop('expires_in'))
return JsonDict(access_token=access_token, expires=expires, **r)
def _qs2dict(self, text):
qs = urlparse.parse_qs(text)
return dict(((k, v[0]) for k, v in qs.iteritems()))
def get_openid(self, access_token):
resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
r = self._qs2dict(resp_text)
return r['openid']
|
michaelliao/sinaweibopy
|
snspy.py
|
QQMixin._parse_access_token
|
python
|
def _parse_access_token(self, resp_text):
' parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true '
r = self._qs2dict(resp_text)
access_token = r.pop('access_token')
expires = time.time() + float(r.pop('expires_in'))
return JsonDict(access_token=access_token, expires=expires, **r)
|
parse access token from urlencoded str like access_token=abcxyz&expires_in=123000&other=true
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L336-L341
|
[
"def _qs2dict(self, text):\n qs = urlparse.parse_qs(text)\n return dict(((k, v[0]) for k, v in qs.iteritems()))\n"
] |
class QQMixin(SNSMixin):
def get_authorize_url(self, redirect_uri='', **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self._redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return 'https://graph.qq.com/oauth2.0/authorize?%s' % \
_encode_params(client_id=self._client_id,
response_type=response_type,
redirect_uri=redirect, **kw)
def _prepare_api(self, method, path, access_token, **kw):
kw['access_token'] = access_token
kw['oauth_consumer_key'] = self._client_id
return method, 'https://graph.qq.com/%s' % path, None, kw
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict: {"access_token":"your-access-token","expires":12345678,"uid":1234}, expires is represented using standard unix-epoch-time
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
return self._parse_access_token(resp_text)
def refresh_access_token(self, refresh_token, redirect_uri=None):
'''
Refresh access token.
'''
redirect = redirect_uri or self._redirect_uri
resp_text = _http('POST', 'https://graph.qq.com/oauth2.0/token',
refresh_token=refresh_token,
client_id=self._client_id, client_secret=self._client_secret,
redirect_uri=redirect, grant_type='refresh_token')
return self._parse_access_token(resp_text)
# FIXME: get oauthid from 'https://graph.z.qq.com/moc2/me?access_token=%s' % access_token
def _qs2dict(self, text):
qs = urlparse.parse_qs(text)
return dict(((k, v[0]) for k, v in qs.iteritems()))
def get_openid(self, access_token):
resp_text = _http('GET', 'https://graph.z.qq.com/moc2/me', access_token=access_token)
r = self._qs2dict(resp_text)
return r['openid']
|
michaelliao/sinaweibopy
|
snspy.py
|
APIClient.get_authorize_url
|
python
|
def get_authorize_url(self, redirect_uri='', **kw):
'''
return the authorization url that the user should be redirected to.
'''
return self._mixin.get_authorize_url(redirect_uri or self._mixin._redirect_uri, **kw)
|
return the authorization url that the user should be redirected to.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L366-L370
| null |
class APIClient(object):
'''
API client using synchronized invocation.
'''
def __init__(self, mixin, app_key, app_secret, redirect_uri='', access_token='', expires=0.0):
self._mixin = mixin(app_key, app_secret, redirect_uri)
self._access_token = str(access_token)
self._expires = expires
def set_access_token(self, access_token, expires):
self._access_token = str(access_token)
self._expires = float(expires)
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict:
{
"access_token": "your-access-token",
"expires": 12345678, # represented using standard unix-epoch-time
"uid": 1234 # other fields
}
'''
r = self._mixin.request_access_token(code, redirect_uri)
self._access_token = r.access_token
return r
def refresh_token(self, refresh_token):
req_str = '%s%s' % (self.auth_url, 'access_token')
r = _http('POST', req_str,
client_id=self.client_id,
client_secret=self.client_secret,
refresh_token=refresh_token,
grant_type='refresh_token')
return self._parse_access_token(r)
def is_expires(self):
return not self.access_token or time.time() > self.expires
def call_api(self, http_method, http_path, **kw):
method, the_url, headers, params = self._mixin._prepare_api(http_method, http_path, self._access_token, **kw)
logging.debug('Call API: %s: %s' % (method, the_url))
try:
resp = _http(method, the_url, headers, **params)
except urllib2.HTTPError, e:
return self._mixin.on_http_error(e)
r = _parse_json(resp)
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
return r
def __getattr__(self, attr):
if hasattr(self._mixin, attr):
return getattr(self._mixin, attr)
return _Callable(self, attr)
|
michaelliao/sinaweibopy
|
snspy.py
|
APIClient.request_access_token
|
python
|
def request_access_token(self, code, redirect_uri=None):
'''
Return access token as a JsonDict:
{
"access_token": "your-access-token",
"expires": 12345678, # represented using standard unix-epoch-time
"uid": 1234 # other fields
}
'''
r = self._mixin.request_access_token(code, redirect_uri)
self._access_token = r.access_token
return r
|
Return access token as a JsonDict:
{
"access_token": "your-access-token",
"expires": 12345678, # represented using standard unix-epoch-time
"uid": 1234 # other fields
}
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/snspy.py#L372-L383
| null |
class APIClient(object):
'''
API client using synchronized invocation.
'''
def __init__(self, mixin, app_key, app_secret, redirect_uri='', access_token='', expires=0.0):
self._mixin = mixin(app_key, app_secret, redirect_uri)
self._access_token = str(access_token)
self._expires = expires
def set_access_token(self, access_token, expires):
self._access_token = str(access_token)
self._expires = float(expires)
def get_authorize_url(self, redirect_uri='', **kw):
'''
return the authorization url that the user should be redirected to.
'''
return self._mixin.get_authorize_url(redirect_uri or self._mixin._redirect_uri, **kw)
def refresh_token(self, refresh_token):
req_str = '%s%s' % (self.auth_url, 'access_token')
r = _http('POST', req_str,
client_id=self.client_id,
client_secret=self.client_secret,
refresh_token=refresh_token,
grant_type='refresh_token')
return self._parse_access_token(r)
def is_expires(self):
return not self.access_token or time.time() > self.expires
def call_api(self, http_method, http_path, **kw):
method, the_url, headers, params = self._mixin._prepare_api(http_method, http_path, self._access_token, **kw)
logging.debug('Call API: %s: %s' % (method, the_url))
try:
resp = _http(method, the_url, headers, **params)
except urllib2.HTTPError, e:
return self._mixin.on_http_error(e)
r = _parse_json(resp)
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
return r
def __getattr__(self, attr):
if hasattr(self._mixin, attr):
return getattr(self._mixin, attr)
return _Callable(self, attr)
|
michaelliao/sinaweibopy
|
weibo.py
|
_parse_json
|
python
|
def _parse_json(s):
' parse str into JsonDict '
def _obj_hook(pairs):
' convert json object to python object '
o = JsonDict()
for k, v in pairs.iteritems():
o[str(k)] = v
return o
return json.loads(s, object_hook=_obj_hook)
|
parse str into JsonDict
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/weibo.py#L46-L55
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.1.4'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for sina weibo API using OAuth 2.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import gzip
import logging
import mimetypes
import collections
class APIError(StandardError):
'''
raise APIError if receiving json message indicating failure.
'''
def __init__(self, error_code, error, request):
self.error_code = error_code
self.error = error
self.request = request
StandardError.__init__(self, error)
def __str__(self):
return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
class JsonDict(dict):
' general json object that allows attributes to be bound to and also behaves like a dict '
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)
def __setattr__(self, attr, value):
self[attr] = value
def _encode_params(**kw):
'''
do url-encode parameters
>>> _encode_params(a=1, b='R&D')
'a=1&b=R%26D'
>>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
'''
args = []
for k, v in kw.iteritems():
if isinstance(v, basestring):
qv = v.encode('utf-8') if isinstance(v, unicode) else v
args.append('%s=%s' % (k, urllib.quote(qv)))
elif isinstance(v, collections.Iterable):
for i in v:
qv = i.encode('utf-8') if isinstance(i, unicode) else str(i)
args.append('%s=%s' % (k, urllib.quote(qv)))
else:
qv = str(v)
args.append('%s=%s' % (k, urllib.quote(qv)))
return '&'.join(args)
def _encode_multipart(**kw):
' build a multipart/form-data body with randomly generated boundary '
boundary = '----------%s' % hex(int(time.time() * 1000))
data = []
for k, v in kw.iteritems():
data.append('--%s' % boundary)
if hasattr(v, 'read'):
# file-like object:
filename = getattr(v, 'name', '')
content = v.read()
data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
data.append('Content-Length: %d' % len(content))
data.append('Content-Type: %s\r\n' % _guess_content_type(filename))
data.append(content)
else:
data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
data.append(v.encode('utf-8') if isinstance(v, unicode) else v)
data.append('--%s--\r\n' % boundary)
return '\r\n'.join(data), boundary
def _guess_content_type(url):
n = url.rfind('.')
if n == -1:
return 'application/octet-stream'
ext = url[n:]
return mimetypes.types_map.get(ext, 'application/octet-stream')
_HTTP_GET = 0
_HTTP_POST = 1
_HTTP_UPLOAD = 2
def _http_get(url, authorization=None, **kw):
logging.info('GET %s' % url)
return _http_call(url, _HTTP_GET, authorization, **kw)
def _http_post(url, authorization=None, **kw):
logging.info('POST %s' % url)
return _http_call(url, _HTTP_POST, authorization, **kw)
def _http_upload(url, authorization=None, **kw):
logging.info('MULTIPART POST %s' % url)
return _http_call(url, _HTTP_UPLOAD, authorization, **kw)
def _read_body(obj):
using_gzip = obj.headers.get('Content-Encoding', '') == 'gzip'
body = obj.read()
if using_gzip:
gzipper = gzip.GzipFile(fileobj=StringIO(body))
fcontent = gzipper.read()
gzipper.close()
return fcontent
return body
def _http_call(the_url, method, authorization, **kw):
'''
send an http request and return a json object if no error occurred.
'''
params = None
boundary = None
if method == _HTTP_UPLOAD:
# fix sina upload url:
the_url = the_url.replace('https://api.', 'https://upload.api.')
params, boundary = _encode_multipart(**kw)
else:
params = _encode_params(**kw)
if '/remind/' in the_url:
# fix sina remind api:
the_url = the_url.replace('https://api.', 'https://rm.api.')
http_url = '%s?%s' % (the_url, params) if method == _HTTP_GET else the_url
http_body = None if method == _HTTP_GET else params
req = urllib2.Request(http_url, data=http_body)
req.add_header('Accept-Encoding', 'gzip')
if authorization:
req.add_header('Authorization', 'OAuth2 %s' % authorization)
if boundary:
req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
try:
resp = urllib2.urlopen(req, timeout=5)
body = _read_body(resp)
r = _parse_json(body)
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
return r
except urllib2.HTTPError as e:
try:
r = _parse_json(_read_body(e))
except:
r = None
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
raise e
class HttpObject(object):
def __init__(self, client, method):
self.client = client
self.method = method
def __getattr__(self, attr):
def wrap(**kw):
if self.client.is_expires():
raise APIError('21327', 'expired_token', attr)
return _http_call('%s%s.json' % (self.client.api_url, attr.replace('__', '/')), self.method, self.client.access_token, **kw)
return wrap
class APIClient(object):
'''
API client using synchronized invocation.
'''
def __init__(self, app_key, app_secret, redirect_uri=None, response_type='code', domain='api.weibo.com', version='2'):
self.client_id = str(app_key)
self.client_secret = str(app_secret)
self.redirect_uri = redirect_uri
self.response_type = response_type
self.auth_url = 'https://%s/oauth2/' % domain
self.api_url = 'https://%s/%s/' % (domain, version)
self.access_token = None
self.expires = 0.0
self.get = HttpObject(self, _HTTP_GET)
self.post = HttpObject(self, _HTTP_POST)
self.upload = HttpObject(self, _HTTP_UPLOAD)
def parse_signed_request(self, signed_request):
'''
parse signed request when using in-site app.
Returns:
dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
or None if parse failed.
'''
def _b64_normalize(s):
appendix = '=' * (4 - len(s) % 4)
return s.replace('-', '+').replace('_', '/') + appendix
sr = str(signed_request)
logging.info('parse signed request: %s' % sr)
enc_sig, enc_payload = sr.split('.', 1)
sig = base64.b64decode(_b64_normalize(enc_sig))
data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
if data['algorithm'] != u'HMAC-SHA256':
return None
expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
if expected_sig == sig:
data.user_id = data.uid = data.get('user_id', None)
data.access_token = data.get('oauth_token', None)
expires = data.get('expires', None)
if expires:
data.expires = data.expires_in = time.time() + expires
return data
return None
def set_access_token(self, access_token, expires):
self.access_token = str(access_token)
self.expires = float(expires)
def get_authorize_url(self, redirect_uri=None, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return '%s%s?%s' % (self.auth_url, 'authorize',
_encode_params(client_id=self.client_id,
response_type=response_type,
redirect_uri=redirect, **kw))
def _parse_access_token(self, r):
'''
new:return access token as a JsonDict: {"access_token":"your-access-token","expires_in":12345678,"uid":1234}, expires_in is represented using standard unix-epoch-time
'''
current = int(time.time())
expires = r.expires_in + current
remind_in = r.get('remind_in', None)
if remind_in:
rtime = int(remind_in) + current
if rtime < expires:
expires = rtime
return JsonDict(access_token=r.access_token, expires=expires, expires_in=expires, uid=r.get('uid', None))
def request_access_token(self, code, redirect_uri=None):
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
r = _http_post('%s%s' % (self.auth_url, 'access_token'),
client_id=self.client_id,
client_secret=self.client_secret,
redirect_uri=redirect,
code=code,
grant_type='authorization_code')
return self._parse_access_token(r)
def refresh_token(self, refresh_token):
req_str = '%s%s' % (self.auth_url, 'access_token')
r = _http_post(req_str,
client_id=self.client_id,
client_secret=self.client_secret,
refresh_token=refresh_token,
grant_type='refresh_token')
return self._parse_access_token(r)
def is_expires(self):
return not self.access_token or time.time() > self.expires
def __getattr__(self, attr):
if '__' in attr:
return getattr(self.get, attr)
return _Callable(self, attr)
_METHOD_MAP = {
'GET': _HTTP_GET,
'POST': _HTTP_POST,
'UPLOAD': _HTTP_UPLOAD
}
class _Executable(object):
def __init__(self, client, method, path):
self._client = client
self._method = method
self._path = path
def __call__(self, **kw):
method = _METHOD_MAP[self._method]
if method == _HTTP_POST and 'pic' in kw:
method = _HTTP_UPLOAD
return _http_call('%s%s.json' % (self._client.api_url, self._path), method, self._client.access_token, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, client, name):
self._client = client
self._name = name
def __getattr__(self, attr):
if attr == 'get':
return _Executable(self._client, 'GET', self._name)
if attr == 'post':
return _Executable(self._client, 'POST', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._client, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
if __name__ == '__main__':
import doctest
doctest.testmod()
|
michaelliao/sinaweibopy
|
weibo.py
|
_encode_params
|
python
|
def _encode_params(**kw):
'''
do url-encode parameters
>>> _encode_params(a=1, b='R&D')
'a=1&b=R%26D'
>>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
'''
args = []
for k, v in kw.iteritems():
if isinstance(v, basestring):
qv = v.encode('utf-8') if isinstance(v, unicode) else v
args.append('%s=%s' % (k, urllib.quote(qv)))
elif isinstance(v, collections.Iterable):
for i in v:
qv = i.encode('utf-8') if isinstance(i, unicode) else str(i)
args.append('%s=%s' % (k, urllib.quote(qv)))
else:
qv = str(v)
args.append('%s=%s' % (k, urllib.quote(qv)))
return '&'.join(args)
|
do url-encode parameters
>>> _encode_params(a=1, b='R&D')
'a=1&b=R%26D'
>>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/weibo.py#L71-L92
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.1.4'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for sina weibo API using OAuth 2.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import gzip
import logging
import mimetypes
import collections
class APIError(StandardError):
'''
raise APIError if receiving json message indicating failure.
'''
def __init__(self, error_code, error, request):
self.error_code = error_code
self.error = error
self.request = request
StandardError.__init__(self, error)
def __str__(self):
return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
def _parse_json(s):
' parse str into JsonDict '
def _obj_hook(pairs):
' convert json object to python object '
o = JsonDict()
for k, v in pairs.iteritems():
o[str(k)] = v
return o
return json.loads(s, object_hook=_obj_hook)
class JsonDict(dict):
' general json object that allows attributes to be bound to and also behaves like a dict '
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)
def __setattr__(self, attr, value):
self[attr] = value
def _encode_multipart(**kw):
' build a multipart/form-data body with randomly generated boundary '
boundary = '----------%s' % hex(int(time.time() * 1000))
data = []
for k, v in kw.iteritems():
data.append('--%s' % boundary)
if hasattr(v, 'read'):
# file-like object:
filename = getattr(v, 'name', '')
content = v.read()
data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
data.append('Content-Length: %d' % len(content))
data.append('Content-Type: %s\r\n' % _guess_content_type(filename))
data.append(content)
else:
data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
data.append(v.encode('utf-8') if isinstance(v, unicode) else v)
data.append('--%s--\r\n' % boundary)
return '\r\n'.join(data), boundary
def _guess_content_type(url):
n = url.rfind('.')
if n == -1:
return 'application/octet-stream'
ext = url[n:]
return mimetypes.types_map.get(ext, 'application/octet-stream')
_HTTP_GET = 0
_HTTP_POST = 1
_HTTP_UPLOAD = 2
def _http_get(url, authorization=None, **kw):
logging.info('GET %s' % url)
return _http_call(url, _HTTP_GET, authorization, **kw)
def _http_post(url, authorization=None, **kw):
logging.info('POST %s' % url)
return _http_call(url, _HTTP_POST, authorization, **kw)
def _http_upload(url, authorization=None, **kw):
logging.info('MULTIPART POST %s' % url)
return _http_call(url, _HTTP_UPLOAD, authorization, **kw)
def _read_body(obj):
using_gzip = obj.headers.get('Content-Encoding', '') == 'gzip'
body = obj.read()
if using_gzip:
gzipper = gzip.GzipFile(fileobj=StringIO(body))
fcontent = gzipper.read()
gzipper.close()
return fcontent
return body
def _http_call(the_url, method, authorization, **kw):
'''
send an http request and return a json object if no error occurred.
'''
params = None
boundary = None
if method == _HTTP_UPLOAD:
# fix sina upload url:
the_url = the_url.replace('https://api.', 'https://upload.api.')
params, boundary = _encode_multipart(**kw)
else:
params = _encode_params(**kw)
if '/remind/' in the_url:
# fix sina remind api:
the_url = the_url.replace('https://api.', 'https://rm.api.')
http_url = '%s?%s' % (the_url, params) if method == _HTTP_GET else the_url
http_body = None if method == _HTTP_GET else params
req = urllib2.Request(http_url, data=http_body)
req.add_header('Accept-Encoding', 'gzip')
if authorization:
req.add_header('Authorization', 'OAuth2 %s' % authorization)
if boundary:
req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
try:
resp = urllib2.urlopen(req, timeout=5)
body = _read_body(resp)
r = _parse_json(body)
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
return r
except urllib2.HTTPError as e:
try:
r = _parse_json(_read_body(e))
except:
r = None
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
raise e
class HttpObject(object):
def __init__(self, client, method):
self.client = client
self.method = method
def __getattr__(self, attr):
def wrap(**kw):
if self.client.is_expires():
raise APIError('21327', 'expired_token', attr)
return _http_call('%s%s.json' % (self.client.api_url, attr.replace('__', '/')), self.method, self.client.access_token, **kw)
return wrap
class APIClient(object):
'''
API client using synchronized invocation.
'''
def __init__(self, app_key, app_secret, redirect_uri=None, response_type='code', domain='api.weibo.com', version='2'):
self.client_id = str(app_key)
self.client_secret = str(app_secret)
self.redirect_uri = redirect_uri
self.response_type = response_type
self.auth_url = 'https://%s/oauth2/' % domain
self.api_url = 'https://%s/%s/' % (domain, version)
self.access_token = None
self.expires = 0.0
self.get = HttpObject(self, _HTTP_GET)
self.post = HttpObject(self, _HTTP_POST)
self.upload = HttpObject(self, _HTTP_UPLOAD)
def parse_signed_request(self, signed_request):
'''
parse signed request when using in-site app.
Returns:
dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
or None if parse failed.
'''
def _b64_normalize(s):
appendix = '=' * (4 - len(s) % 4)
return s.replace('-', '+').replace('_', '/') + appendix
sr = str(signed_request)
logging.info('parse signed request: %s' % sr)
enc_sig, enc_payload = sr.split('.', 1)
sig = base64.b64decode(_b64_normalize(enc_sig))
data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
if data['algorithm'] != u'HMAC-SHA256':
return None
expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
if expected_sig == sig:
data.user_id = data.uid = data.get('user_id', None)
data.access_token = data.get('oauth_token', None)
expires = data.get('expires', None)
if expires:
data.expires = data.expires_in = time.time() + expires
return data
return None
def set_access_token(self, access_token, expires):
self.access_token = str(access_token)
self.expires = float(expires)
def get_authorize_url(self, redirect_uri=None, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return '%s%s?%s' % (self.auth_url, 'authorize',
_encode_params(client_id=self.client_id,
response_type=response_type,
redirect_uri=redirect, **kw))
def _parse_access_token(self, r):
'''
new:return access token as a JsonDict: {"access_token":"your-access-token","expires_in":12345678,"uid":1234}, expires_in is represented using standard unix-epoch-time
'''
current = int(time.time())
expires = r.expires_in + current
remind_in = r.get('remind_in', None)
if remind_in:
rtime = int(remind_in) + current
if rtime < expires:
expires = rtime
return JsonDict(access_token=r.access_token, expires=expires, expires_in=expires, uid=r.get('uid', None))
def request_access_token(self, code, redirect_uri=None):
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
r = _http_post('%s%s' % (self.auth_url, 'access_token'),
client_id=self.client_id,
client_secret=self.client_secret,
redirect_uri=redirect,
code=code,
grant_type='authorization_code')
return self._parse_access_token(r)
def refresh_token(self, refresh_token):
req_str = '%s%s' % (self.auth_url, 'access_token')
r = _http_post(req_str,
client_id=self.client_id,
client_secret=self.client_secret,
refresh_token=refresh_token,
grant_type='refresh_token')
return self._parse_access_token(r)
def is_expires(self):
return not self.access_token or time.time() > self.expires
def __getattr__(self, attr):
if '__' in attr:
return getattr(self.get, attr)
return _Callable(self, attr)
_METHOD_MAP = {
'GET': _HTTP_GET,
'POST': _HTTP_POST,
'UPLOAD': _HTTP_UPLOAD
}
class _Executable(object):
def __init__(self, client, method, path):
self._client = client
self._method = method
self._path = path
def __call__(self, **kw):
method = _METHOD_MAP[self._method]
if method == _HTTP_POST and 'pic' in kw:
method = _HTTP_UPLOAD
return _http_call('%s%s.json' % (self._client.api_url, self._path), method, self._client.access_token, **kw)
def __str__(self):
return '_Executable (%s %s)' % (self._method, self._path)
__repr__ = __str__
class _Callable(object):
def __init__(self, client, name):
self._client = client
self._name = name
def __getattr__(self, attr):
if attr == 'get':
return _Executable(self._client, 'GET', self._name)
if attr == 'post':
return _Executable(self._client, 'POST', self._name)
name = '%s/%s' % (self._name, attr)
return _Callable(self._client, name)
def __str__(self):
return '_Callable (%s)' % self._name
__repr__ = __str__
# Run the module's doctests when executed directly as a script.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
|
michaelliao/sinaweibopy
|
weibo.py
|
_http_call
|
python
|
def _http_call(the_url, method, authorization, **kw):
'''
send an http request and return a json object if no error occurred.
'''
params = None
boundary = None
if method == _HTTP_UPLOAD:
# fix sina upload url:
the_url = the_url.replace('https://api.', 'https://upload.api.')
params, boundary = _encode_multipart(**kw)
else:
params = _encode_params(**kw)
if '/remind/' in the_url:
# fix sina remind api:
the_url = the_url.replace('https://api.', 'https://rm.api.')
http_url = '%s?%s' % (the_url, params) if method == _HTTP_GET else the_url
http_body = None if method == _HTTP_GET else params
req = urllib2.Request(http_url, data=http_body)
req.add_header('Accept-Encoding', 'gzip')
if authorization:
req.add_header('Authorization', 'OAuth2 %s' % authorization)
if boundary:
req.add_header('Content-Type', 'multipart/form-data; boundary=%s' % boundary)
try:
resp = urllib2.urlopen(req, timeout=5)
body = _read_body(resp)
r = _parse_json(body)
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
return r
except urllib2.HTTPError as e:
try:
r = _parse_json(_read_body(e))
except:
r = None
if hasattr(r, 'error_code'):
raise APIError(r.error_code, r.get('error', ''), r.get('request', ''))
raise e
|
send an http request and return a json object if no error occurred.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/weibo.py#L154-L191
|
[
"def _parse_json(s):\n ' parse str into JsonDict '\n\n def _obj_hook(pairs):\n ' convert json object to python object '\n o = JsonDict()\n for k, v in pairs.iteritems():\n o[str(k)] = v\n return o\n return json.loads(s, object_hook=_obj_hook)\n",
"def _encode_params(**kw):\n '''\n do url-encode parameters\n\n >>> _encode_params(a=1, b='R&D')\n 'a=1&b=R%26D'\n >>> _encode_params(a=u'\\u4e2d\\u6587', b=['A', 'B', 123])\n 'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'\n '''\n args = []\n for k, v in kw.iteritems():\n if isinstance(v, basestring):\n qv = v.encode('utf-8') if isinstance(v, unicode) else v\n args.append('%s=%s' % (k, urllib.quote(qv)))\n elif isinstance(v, collections.Iterable):\n for i in v:\n qv = i.encode('utf-8') if isinstance(i, unicode) else str(i)\n args.append('%s=%s' % (k, urllib.quote(qv)))\n else:\n qv = str(v)\n args.append('%s=%s' % (k, urllib.quote(qv)))\n return '&'.join(args)\n",
"def _encode_multipart(**kw):\n ' build a multipart/form-data body with randomly generated boundary '\n boundary = '----------%s' % hex(int(time.time() * 1000))\n data = []\n for k, v in kw.iteritems():\n data.append('--%s' % boundary)\n if hasattr(v, 'read'):\n # file-like object:\n filename = getattr(v, 'name', '')\n content = v.read()\n data.append('Content-Disposition: form-data; name=\"%s\"; filename=\"hidden\"' % k)\n data.append('Content-Length: %d' % len(content))\n data.append('Content-Type: %s\\r\\n' % _guess_content_type(filename))\n data.append(content)\n else:\n data.append('Content-Disposition: form-data; name=\"%s\"\\r\\n' % k)\n data.append(v.encode('utf-8') if isinstance(v, unicode) else v)\n data.append('--%s--\\r\\n' % boundary)\n return '\\r\\n'.join(data), boundary\n",
"def _read_body(obj):\n using_gzip = obj.headers.get('Content-Encoding', '') == 'gzip'\n body = obj.read()\n if using_gzip:\n gzipper = gzip.GzipFile(fileobj=StringIO(body))\n fcontent = gzipper.read()\n gzipper.close()\n return fcontent\n return body\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__version__ = '1.1.4'
__author__ = 'Liao Xuefeng (askxuefeng@gmail.com)'
'''
Python client SDK for sina weibo API using OAuth 2.
'''
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import time
import json
import hmac
import hashlib
import base64
import urllib
import urllib2
import gzip
import logging
import mimetypes
import collections
class APIError(StandardError):
    '''
    raise APIError if receiving json message indicating failure.
    '''
    # NOTE(review): StandardError exists only in Python 2; on Python 3
    # this class would need to derive from Exception.

    def __init__(self, error_code, error, request):
        self.error_code = error_code
        self.error = error
        self.request = request
        StandardError.__init__(self, error)

    def __str__(self):
        return 'APIError: %s: %s, request: %s' % (self.error_code, self.error, self.request)
def _parse_json(s):
    ' parse str into JsonDict '
    def _obj_hook(pairs):
        ' convert json object to python object '
        # object_hook receives each decoded JSON object as a dict and
        # re-wraps it so attribute access works (JsonDict).
        # NOTE(review): dict.iteritems is Python 2 only.
        o = JsonDict()
        for k, v in pairs.iteritems():
            o[str(k)] = v
        return o
    return json.loads(s, object_hook=_obj_hook)
class JsonDict(dict):
    """A dict whose keys are also readable/writable as attributes.

    Missing keys accessed as attributes raise AttributeError (not
    KeyError) so the object behaves like a normal Python object.
    (Fix: restores the indentation that was flattened in this copy of
    the source.)
    """

    def __getattr__(self, attr):
        try:
            return self[attr]
        except KeyError:
            raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr)

    def __setattr__(self, attr, value):
        # Attribute assignment is stored as a dict entry.
        self[attr] = value
def _encode_params(**kw):
    '''
    do url-encode parameters
    >>> _encode_params(a=1, b='R&D')
    'a=1&b=R%26D'
    >>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
    'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
    '''
    # NOTE(review): basestring/unicode/iteritems and urllib.quote are
    # Python 2 only.
    args = []
    for k, v in kw.iteritems():
        if isinstance(v, basestring):
            # Unicode values are encoded to UTF-8 before percent-quoting.
            qv = v.encode('utf-8') if isinstance(v, unicode) else v
            args.append('%s=%s' % (k, urllib.quote(qv)))
        elif isinstance(v, collections.Iterable):
            # An iterable value becomes repeated key=value pairs.
            for i in v:
                qv = i.encode('utf-8') if isinstance(i, unicode) else str(i)
                args.append('%s=%s' % (k, urllib.quote(qv)))
        else:
            # Everything else (ints, etc.) is stringified.
            qv = str(v)
            args.append('%s=%s' % (k, urllib.quote(qv)))
    return '&'.join(args)
def _encode_multipart(**kw):
    ' build a multipart/form-data body with randomly generated boundary '
    # The boundary is derived from the current time in milliseconds.
    boundary = '----------%s' % hex(int(time.time() * 1000))
    data = []
    for k, v in kw.iteritems():
        data.append('--%s' % boundary)
        if hasattr(v, 'read'):
            # file-like object:
            filename = getattr(v, 'name', '')
            content = v.read()
            # The real filename is hidden; only its extension is used to
            # guess the Content-Type.
            data.append('Content-Disposition: form-data; name="%s"; filename="hidden"' % k)
            data.append('Content-Length: %d' % len(content))
            data.append('Content-Type: %s\r\n' % _guess_content_type(filename))
            data.append(content)
        else:
            # Plain form field; unicode is UTF-8 encoded (Python 2 types).
            data.append('Content-Disposition: form-data; name="%s"\r\n' % k)
            data.append(v.encode('utf-8') if isinstance(v, unicode) else v)
    data.append('--%s--\r\n' % boundary)
    # Returns (body, boundary) so the caller can set the Content-Type header.
    return '\r\n'.join(data), boundary
def _guess_content_type(url):
n = url.rfind('.')
if n == -1:
return 'application/octet-stream'
ext = url[n:]
return mimetypes.types_map.get(ext, 'application/octet-stream')
# HTTP invocation modes understood by _http_call.
_HTTP_GET = 0
_HTTP_POST = 1
_HTTP_UPLOAD = 2

def _http_get(url, authorization=None, **kw):
    # Thin logging wrapper around _http_call for GET requests.
    logging.info('GET %s' % url)
    return _http_call(url, _HTTP_GET, authorization, **kw)

def _http_post(url, authorization=None, **kw):
    # Thin logging wrapper around _http_call for form-encoded POSTs.
    logging.info('POST %s' % url)
    return _http_call(url, _HTTP_POST, authorization, **kw)

def _http_upload(url, authorization=None, **kw):
    # Thin logging wrapper around _http_call for multipart uploads.
    logging.info('MULTIPART POST %s' % url)
    return _http_call(url, _HTTP_UPLOAD, authorization, **kw)
def _read_body(obj):
using_gzip = obj.headers.get('Content-Encoding', '') == 'gzip'
body = obj.read()
if using_gzip:
gzipper = gzip.GzipFile(fileobj=StringIO(body))
fcontent = gzipper.read()
gzipper.close()
return fcontent
return body
class HttpObject(object):
    # Bound to an APIClient as .get/.post/.upload.  Attribute access
    # yields a callable that hits the corresponding API endpoint;
    # double underscores in the attribute name become path separators
    # ('statuses__update' -> 'statuses/update').

    def __init__(self, client, method):
        self.client = client
        self.method = method

    def __getattr__(self, attr):
        def wrap(**kw):
            # Refuse to issue requests with a missing or expired token.
            if self.client.is_expires():
                raise APIError('21327', 'expired_token', attr)
            return _http_call('%s%s.json' % (self.client.api_url, attr.replace('__', '/')), self.method, self.client.access_token, **kw)
        return wrap
class APIClient(object):
    '''
    API client using synchronized invocation.
    '''

    def __init__(self, app_key, app_secret, redirect_uri=None, response_type='code', domain='api.weibo.com', version='2'):
        self.client_id = str(app_key)
        self.client_secret = str(app_secret)
        self.redirect_uri = redirect_uri
        self.response_type = response_type
        self.auth_url = 'https://%s/oauth2/' % domain
        self.api_url = 'https://%s/%s/' % (domain, version)
        self.access_token = None
        self.expires = 0.0
        # Verb-specific dispatchers: client.get.xxx(...), client.post.xxx(...),
        # client.upload.xxx(...).
        self.get = HttpObject(self, _HTTP_GET)
        self.post = HttpObject(self, _HTTP_POST)
        self.upload = HttpObject(self, _HTTP_UPLOAD)

    def parse_signed_request(self, signed_request):
        '''
        parse signed request when using in-site app.
        Returns:
        dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
        or None if parse failed.
        '''
        def _b64_normalize(s):
            # Restore URL-safe base64 to standard base64 and re-pad.
            appendix = '=' * (4 - len(s) % 4)
            return s.replace('-', '+').replace('_', '/') + appendix
        sr = str(signed_request)
        logging.info('parse signed request: %s' % sr)
        # Format is '<base64 signature>.<base64 payload>'.
        enc_sig, enc_payload = sr.split('.', 1)
        sig = base64.b64decode(_b64_normalize(enc_sig))
        data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
        if data['algorithm'] != u'HMAC-SHA256':
            return None
        # Verify the HMAC-SHA256 signature over the still-encoded payload.
        # NOTE(review): a plain '==' compare is not constant-time; a
        # hardened version would use hmac.compare_digest.
        expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
        if expected_sig == sig:
            data.user_id = data.uid = data.get('user_id', None)
            data.access_token = data.get('oauth_token', None)
            expires = data.get('expires', None)
            if expires:
                # 'expires' arrives as a relative TTL; store it absolute.
                data.expires = data.expires_in = time.time() + expires
            return data
        return None

    def set_access_token(self, access_token, expires):
        self.access_token = str(access_token)
        self.expires = float(expires)

    def get_authorize_url(self, redirect_uri=None, **kw):
        '''
        return the authorization url that the user should be redirected to.
        '''
        redirect = redirect_uri if redirect_uri else self.redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        # Callers may override response_type; defaults to 'code'.
        response_type = kw.pop('response_type', 'code')
        return '%s%s?%s' % (self.auth_url, 'authorize',
                _encode_params(client_id=self.client_id,
                        response_type=response_type,
                        redirect_uri=redirect, **kw))

    def _parse_access_token(self, r):
        '''
        new:return access token as a JsonDict: {"access_token":"your-access-token","expires_in":12345678,"uid":1234}, expires_in is represented using standard unix-epoch-time
        '''
        # Normalize relative 'expires_in' (and the optional sooner
        # 'remind_in') into one absolute epoch timestamp.
        current = int(time.time())
        expires = r.expires_in + current
        remind_in = r.get('remind_in', None)
        if remind_in:
            rtime = int(remind_in) + current
            if rtime < expires:
                expires = rtime
        return JsonDict(access_token=r.access_token, expires=expires, expires_in=expires, uid=r.get('uid', None))

    def request_access_token(self, code, redirect_uri=None):
        # Exchange an OAuth2 authorization code for an access token.
        redirect = redirect_uri if redirect_uri else self.redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        r = _http_post('%s%s' % (self.auth_url, 'access_token'),
                client_id=self.client_id,
                client_secret=self.client_secret,
                redirect_uri=redirect,
                code=code,
                grant_type='authorization_code')
        return self._parse_access_token(r)

    def refresh_token(self, refresh_token):
        # Trade a refresh token for a new access token.
        req_str = '%s%s' % (self.auth_url, 'access_token')
        r = _http_post(req_str,
                client_id=self.client_id,
                client_secret=self.client_secret,
                refresh_token=refresh_token,
                grant_type='refresh_token')
        return self._parse_access_token(r)

    def is_expires(self):
        # True when no token is set or the absolute expiry has passed.
        return not self.access_token or time.time() > self.expires

    def __getattr__(self, attr):
        # '__'-style names are legacy API paths dispatched via GET;
        # anything else starts a fluent _Callable chain.
        if '__' in attr:
            return getattr(self.get, attr)
        return _Callable(self, attr)
# Maps the verb strings used by _Executable to the module-level HTTP
# mode constants consumed by _http_call.
_METHOD_MAP = {
    'GET': _HTTP_GET,
    'POST': _HTTP_POST,
    'UPLOAD': _HTTP_UPLOAD
}
class _Executable(object):
    # Terminal node of the fluent API-path builder: calling the instance
    # issues the actual HTTP request for the accumulated path.

    def __init__(self, client, method, path):
        self._client = client
        self._method = method
        self._path = path

    def __call__(self, **kw):
        method = _METHOD_MAP[self._method]
        # A POST carrying a 'pic' argument is a file upload and must be
        # sent as multipart/form-data.
        if method == _HTTP_POST and 'pic' in kw:
            method = _HTTP_UPLOAD
        return _http_call('%s%s.json' % (self._client.api_url, self._path), method, self._client.access_token, **kw)

    def __str__(self):
        return '_Executable (%s %s)' % (self._method, self._path)

    __repr__ = __str__
class _Callable(object):
    # Accumulates an API path attribute-by-attribute
    # ('statuses' -> 'statuses/friends_timeline'); accessing .get or
    # .post terminates the chain with an _Executable.

    def __init__(self, client, name):
        self._client = client
        self._name = name

    def __getattr__(self, attr):
        if attr == 'get':
            return _Executable(self._client, 'GET', self._name)
        if attr == 'post':
            return _Executable(self._client, 'POST', self._name)
        name = '%s/%s' % (self._name, attr)
        return _Callable(self._client, name)

    def __str__(self):
        return '_Callable (%s)' % self._name

    __repr__ = __str__
# Run the module's doctests when executed directly as a script.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
|
michaelliao/sinaweibopy
|
weibo.py
|
APIClient.get_authorize_url
|
python
|
def get_authorize_url(self, redirect_uri=None, **kw):
'''
return the authorization url that the user should be redirected to.
'''
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
response_type = kw.pop('response_type', 'code')
return '%s%s?%s' % (self.auth_url, 'authorize',
_encode_params(client_id=self.client_id,
response_type=response_type,
redirect_uri=redirect, **kw))
|
return the authorization url that the user should be redirected to.
|
train
|
https://github.com/michaelliao/sinaweibopy/blob/0f19dd71c1fbd16ee539620c7e9e986887f5c665/weibo.py#L259-L270
|
[
"def _encode_params(**kw):\n '''\n do url-encode parameters\n\n >>> _encode_params(a=1, b='R&D')\n 'a=1&b=R%26D'\n >>> _encode_params(a=u'\\u4e2d\\u6587', b=['A', 'B', 123])\n 'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'\n '''\n args = []\n for k, v in kw.iteritems():\n if isinstance(v, basestring):\n qv = v.encode('utf-8') if isinstance(v, unicode) else v\n args.append('%s=%s' % (k, urllib.quote(qv)))\n elif isinstance(v, collections.Iterable):\n for i in v:\n qv = i.encode('utf-8') if isinstance(i, unicode) else str(i)\n args.append('%s=%s' % (k, urllib.quote(qv)))\n else:\n qv = str(v)\n args.append('%s=%s' % (k, urllib.quote(qv)))\n return '&'.join(args)\n"
] |
class APIClient(object):
    '''
    API client using synchronized invocation.
    '''
    # NOTE(review): this copy of the class omits get_authorize_url,
    # unlike the fuller copy earlier in this file.

    def __init__(self, app_key, app_secret, redirect_uri=None, response_type='code', domain='api.weibo.com', version='2'):
        self.client_id = str(app_key)
        self.client_secret = str(app_secret)
        self.redirect_uri = redirect_uri
        self.response_type = response_type
        self.auth_url = 'https://%s/oauth2/' % domain
        self.api_url = 'https://%s/%s/' % (domain, version)
        self.access_token = None
        self.expires = 0.0
        # Verb-specific dispatchers: client.get.xxx(...), etc.
        self.get = HttpObject(self, _HTTP_GET)
        self.post = HttpObject(self, _HTTP_POST)
        self.upload = HttpObject(self, _HTTP_UPLOAD)

    def parse_signed_request(self, signed_request):
        '''
        parse signed request when using in-site app.
        Returns:
        dict object like { 'uid': 12345, 'access_token': 'ABC123XYZ', 'expires': unix-timestamp },
        or None if parse failed.
        '''
        def _b64_normalize(s):
            # Restore URL-safe base64 to standard base64 and re-pad.
            appendix = '=' * (4 - len(s) % 4)
            return s.replace('-', '+').replace('_', '/') + appendix
        sr = str(signed_request)
        logging.info('parse signed request: %s' % sr)
        # Format is '<base64 signature>.<base64 payload>'.
        enc_sig, enc_payload = sr.split('.', 1)
        sig = base64.b64decode(_b64_normalize(enc_sig))
        data = _parse_json(base64.b64decode(_b64_normalize(enc_payload)))
        if data['algorithm'] != u'HMAC-SHA256':
            return None
        # Verify the HMAC-SHA256 signature over the still-encoded payload.
        expected_sig = hmac.new(self.client_secret, enc_payload, hashlib.sha256).digest()
        if expected_sig == sig:
            data.user_id = data.uid = data.get('user_id', None)
            data.access_token = data.get('oauth_token', None)
            expires = data.get('expires', None)
            if expires:
                # 'expires' arrives as a relative TTL; store it absolute.
                data.expires = data.expires_in = time.time() + expires
            return data
        return None

    def set_access_token(self, access_token, expires):
        self.access_token = str(access_token)
        self.expires = float(expires)

    def _parse_access_token(self, r):
        '''
        new:return access token as a JsonDict: {"access_token":"your-access-token","expires_in":12345678,"uid":1234}, expires_in is represented using standard unix-epoch-time
        '''
        # Normalize relative 'expires_in' (and the optional sooner
        # 'remind_in') into one absolute epoch timestamp.
        current = int(time.time())
        expires = r.expires_in + current
        remind_in = r.get('remind_in', None)
        if remind_in:
            rtime = int(remind_in) + current
            if rtime < expires:
                expires = rtime
        return JsonDict(access_token=r.access_token, expires=expires, expires_in=expires, uid=r.get('uid', None))

    def request_access_token(self, code, redirect_uri=None):
        # Exchange an OAuth2 authorization code for an access token.
        redirect = redirect_uri if redirect_uri else self.redirect_uri
        if not redirect:
            raise APIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
        r = _http_post('%s%s' % (self.auth_url, 'access_token'),
                client_id=self.client_id,
                client_secret=self.client_secret,
                redirect_uri=redirect,
                code=code,
                grant_type='authorization_code')
        return self._parse_access_token(r)

    def refresh_token(self, refresh_token):
        # Trade a refresh token for a new access token.
        req_str = '%s%s' % (self.auth_url, 'access_token')
        r = _http_post(req_str,
                client_id=self.client_id,
                client_secret=self.client_secret,
                refresh_token=refresh_token,
                grant_type='refresh_token')
        return self._parse_access_token(r)

    def is_expires(self):
        # True when no token is set or the absolute expiry has passed.
        return not self.access_token or time.time() > self.expires

    def __getattr__(self, attr):
        # '__'-style names are legacy API paths dispatched via GET;
        # anything else starts a fluent _Callable chain.
        if '__' in attr:
            return getattr(self.get, attr)
        return _Callable(self, attr)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.